├── .gitignore ├── .travis.yml ├── LICENSE ├── bench_test.go ├── dateparse ├── main.go └── README.md ├── example └── main.go ├── README.md ├── parseany_test.go └── parseany.go /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode/ 2 | .vscode/launch.json 3 | example/debug 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: go 2 | 3 | go: 4 | - 1.8.x 5 | - tip 6 | 7 | before_install: 8 | - go get -t -v ./... 9 | 10 | script: 11 | - go test -race -coverprofile=coverage.txt -covermode=atomic 12 | 13 | after_success: 14 | - bash <(curl -s https://codecov.io/bash) -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015-2017 Aaron Raddon 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
22 | -------------------------------------------------------------------------------- /bench_test.go: -------------------------------------------------------------------------------- 1 | package dateparse 2 | 3 | import ( 4 | "fmt" 5 | "testing" 6 | "time" 7 | ) 8 | 9 | /* 10 | 11 | go test -bench Parse 12 | 13 | BenchmarkShotgunParse 50000 37588 ns/op 13258 B/op 167 allocs/op 14 | BenchmarkDateparseParseAny 500000 5752 ns/op 0 B/op 0 allocs/op 15 | 16 | */ 17 | func BenchmarkShotgunParse(b *testing.B) { 18 | b.ReportAllocs() 19 | for i := 0; i < b.N; i++ { 20 | for _, dateStr := range testDates { 21 | // This is the non dateparse traditional approach 22 | parseShotgunStyle(dateStr) 23 | } 24 | } 25 | } 26 | 27 | func BenchmarkParseAny(b *testing.B) { 28 | b.ReportAllocs() 29 | for i := 0; i < b.N; i++ { 30 | for _, dateStr := range testDates { 31 | ParseAny(dateStr) 32 | } 33 | } 34 | } 35 | 36 | var ( 37 | testDates = []string{ 38 | "2012/03/19 10:11:59", 39 | "2012/03/19 10:11:59.3186369", 40 | "2009-08-12T22:15:09-07:00", 41 | "2014-04-26 17:24:37.3186369", 42 | "2012-08-03 18:31:59.257000000", 43 | "2013-04-01 22:43:22", 44 | "2014-04-26 17:24:37.123", 45 | "2014-12-16 06:20:00 UTC", 46 | "1384216367189", 47 | "1332151919", 48 | "2014-05-11 08:20:13,787", 49 | "2014-04-26 05:24:37 PM", 50 | "2014-04-26", 51 | } 52 | 53 | ErrDateFormat = fmt.Errorf("Invalid Date Format") 54 | 55 | timeFormats = []string{ 56 | // ISO 8601ish formats 57 | time.RFC3339Nano, 58 | time.RFC3339, 59 | 60 | // Unusual formats, prefer formats with timezones 61 | time.RFC1123Z, 62 | time.RFC1123, 63 | time.RFC822Z, 64 | time.RFC822, 65 | time.UnixDate, 66 | time.RubyDate, 67 | time.ANSIC, 68 | 69 | // Hilariously, Go doesn't have a const for it's own time layout. 70 | // See: https://code.google.com/p/go/issues/detail?id=6587 71 | "2006-01-02 15:04:05.999999999 -0700 MST", 72 | 73 | // No timezone information 74 | "2006-01-02T15:04:05.999999999", 75 | "2006-01-02T15:04:05", 76 | "2006-01-02 15:04:05.999999999", 77 | "2006-01-02 15:04:05", 78 | } 79 | ) 80 | 81 | func parseShotgunStyle(raw string) (time.Time, error) { 82 | 83 | for _, format := range timeFormats { 84 | t, err := time.Parse(format, raw) 85 | if err == nil { 86 | // Parsed successfully 87 | return t, nil 88 | } 89 | } 90 | return time.Time{}, ErrDateFormat 91 | } 92 | -------------------------------------------------------------------------------- /dateparse/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "time" 7 | 8 | "github.com/apcera/termtables" 9 | "github.com/noaway/dateparse" 10 | ) 11 | 12 | var ( 13 | timezone = "" 14 | datestr = "" 15 | ) 16 | 17 | func main() { 18 | flag.StringVar(&timezone, "timezone", "", "Timezone aka `America/Los_Angeles` formatted time-zone") 19 | flag.Parse() 20 | 21 | if len(flag.Args()) == 0 { 22 | fmt.Println(`Must pass a time, and optional location: 23 | 24 | ./dateparse "2009-08-12T22:15:09.99Z" 25 | 26 | ./dateparse --timezone="America/Denver" "2017-07-19 03:21:51+00:00" 27 | `) 28 | return 29 | } 30 | 31 | datestr = flag.Args()[0] 32 | 33 | var loc *time.Location 34 | if timezone != "" { 35 | // NOTE: This is very, very important to understand 36 | // time-parsing in go 37 | l, err := time.LoadLocation(timezone) 38 | if err != nil { 39 | panic(err.Error()) 40 | } 41 | loc = l 42 | } 43 | 44 | zonename, _ := time.Now().In(time.Local).Zone() 45 | fmt.Printf("\nYour Current time.Local zone is %v\n\n", zonename) 
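// The table built below runs the same datestr through ParseAny, ParseIn and
// ParseLocal, with time.Local set to nil, to the --timezone location (when
// given), and to time.UTC, so the effect of the global time.Local on each
// method can be compared side by side. Mutating time.Local like this is only
// reasonable in a demo tool such as this one.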
46 | 47 | table := termtables.CreateTable() 48 | 49 | table.AddHeaders("method", "Zone Source", "Parsed", "Parsed: t.In(time.UTC)") 50 | 51 | parsers := map[string]parser{ 52 | "ParseAny": parseAny, 53 | "ParseIn": parseIn, 54 | "ParseLocal": parseLocal, 55 | } 56 | 57 | for name, parser := range parsers { 58 | time.Local = nil 59 | table.AddRow(name, "time.Local = nil", parser(datestr, nil), parser(datestr, nil).In(time.UTC)) 60 | if timezone != "" { 61 | time.Local = loc 62 | table.AddRow(name, "time.Local = timezone arg", parser(datestr, loc), parser(datestr, loc).In(time.UTC)) 63 | } 64 | time.Local = time.UTC 65 | table.AddRow(name, "time.Local = time.UTC", parser(datestr, time.UTC), parser(datestr, time.UTC).In(time.UTC)) 66 | } 67 | 68 | fmt.Println(table.Render()) 69 | } 70 | 71 | func stuff() (string, string) { 72 | return "more", "stuff" 73 | } 74 | 75 | type parser func(datestr string, loc *time.Location) time.Time 76 | 77 | func parseLocal(datestr string, loc *time.Location) time.Time { 78 | time.Local = loc 79 | t, _ := dateparse.ParseLocal(datestr) 80 | return t 81 | } 82 | 83 | func parseIn(datestr string, loc *time.Location) time.Time { 84 | t, _ := dateparse.ParseIn(datestr, loc) 85 | return t 86 | } 87 | 88 | func parseAny(datestr string, loc *time.Location) time.Time { 89 | t, _ := dateparse.ParseAny(datestr) 90 | return t 91 | } 92 | -------------------------------------------------------------------------------- /dateparse/README.md: -------------------------------------------------------------------------------- 1 | DateParse CLI 2 | ---------------------- 3 | 4 | Simple CLI to test out dateparse. 5 | 6 | 7 | ```sh 8 | 9 | # Since this date string has no timezone/offset so is more effected by 10 | # which method you use to parse 11 | 12 | $ dateparse --timezone="America/Denver" "2017-07-19 03:21:00" 13 | 14 | Your Current time.Local zone is PDT 15 | 16 | +------------+---------------------------+-------------------------------+-------------------------------+ 17 | | method | Zone Source | Parsed | Parsed: t.In(time.UTC) | 18 | +------------+---------------------------+-------------------------------+-------------------------------+ 19 | | ParseAny | time.Local = nil | 2017-07-19 03:21:00 +0000 UTC | 2017-07-19 03:21:00 +0000 UTC | 20 | | ParseAny | time.Local = timezone arg | 2017-07-19 03:21:00 +0000 UTC | 2017-07-19 03:21:00 +0000 UTC | 21 | | ParseAny | time.Local = time.UTC | 2017-07-19 03:21:00 +0000 UTC | 2017-07-19 03:21:00 +0000 UTC | 22 | | ParseIn | time.Local = nil | 2017-07-19 03:21:00 +0000 UTC | 2017-07-19 03:21:00 +0000 UTC | 23 | | ParseIn | time.Local = timezone arg | 2017-07-19 03:21:00 -0600 MDT | 2017-07-19 09:21:00 +0000 UTC | 24 | | ParseIn | time.Local = time.UTC | 2017-07-19 03:21:00 +0000 UTC | 2017-07-19 03:21:00 +0000 UTC | 25 | | ParseLocal | time.Local = nil | 2017-07-19 03:21:00 +0000 UTC | 2017-07-19 03:21:00 +0000 UTC | 26 | | ParseLocal | time.Local = timezone arg | 2017-07-19 03:21:00 -0600 MDT | 2017-07-19 09:21:00 +0000 UTC | 27 | | ParseLocal | time.Local = time.UTC | 2017-07-19 03:21:00 +0000 UTC | 2017-07-19 03:21:00 +0000 UTC | 28 | +------------+---------------------------+-------------------------------+-------------------------------+ 29 | 30 | # Note on this one that the outputed zone is always UTC/0 offset as opposed to above 31 | 32 | $ dateparse --timezone="America/Denver" "2017-07-19 03:21:51+00:00" 33 | 34 | Your Current time.Local zone is PDT 35 | 36 | 
+------------+---------------------------+---------------------------------+-------------------------------+ 37 | | method | Zone Source | Parsed | Parsed: t.In(time.UTC) | 38 | +------------+---------------------------+---------------------------------+-------------------------------+ 39 | | ParseAny | time.Local = nil | 2017-07-19 03:21:51 +0000 UTC | 2017-07-19 03:21:51 +0000 UTC | 40 | | ParseAny | time.Local = timezone arg | 2017-07-19 03:21:51 +0000 +0000 | 2017-07-19 03:21:51 +0000 UTC | 41 | | ParseAny | time.Local = time.UTC | 2017-07-19 03:21:51 +0000 UTC | 2017-07-19 03:21:51 +0000 UTC | 42 | | ParseIn | time.Local = nil | 2017-07-19 03:21:51 +0000 UTC | 2017-07-19 03:21:51 +0000 UTC | 43 | | ParseIn | time.Local = timezone arg | 2017-07-19 03:21:51 +0000 +0000 | 2017-07-19 03:21:51 +0000 UTC | 44 | | ParseIn | time.Local = time.UTC | 2017-07-19 03:21:51 +0000 UTC | 2017-07-19 03:21:51 +0000 UTC | 45 | | ParseLocal | time.Local = nil | 2017-07-19 03:21:51 +0000 UTC | 2017-07-19 03:21:51 +0000 UTC | 46 | | ParseLocal | time.Local = timezone arg | 2017-07-19 03:21:51 +0000 +0000 | 2017-07-19 03:21:51 +0000 UTC | 47 | | ParseLocal | time.Local = time.UTC | 2017-07-19 03:21:51 +0000 UTC | 2017-07-19 03:21:51 +0000 UTC | 48 | +------------+---------------------------+---------------------------------+-------------------------------+ 49 | 50 | 51 | $ dateparse --timezone="America/Denver" "Monday, 19-Jul-17 03:21:00 MDT" 52 | 53 | Your Current time.Local zone is PDT 54 | 55 | +------------+---------------------------+-------------------------------+-------------------------------+ 56 | | method | Zone Source | Parsed | Parsed: t.In(time.UTC) | 57 | +------------+---------------------------+-------------------------------+-------------------------------+ 58 | | ParseAny | time.Local = nil | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 59 | | ParseAny | time.Local = timezone arg | 2017-07-19 03:21:00 -0600 MDT | 2017-07-19 09:21:00 +0000 UTC | 60 | | ParseAny | time.Local = time.UTC | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 61 | | ParseIn | time.Local = nil | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 62 | | ParseIn | time.Local = timezone arg | 2017-07-19 03:21:00 -0600 MDT | 2017-07-19 09:21:00 +0000 UTC | 63 | | ParseIn | time.Local = time.UTC | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 64 | | ParseLocal | time.Local = nil | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 65 | | ParseLocal | time.Local = timezone arg | 2017-07-19 03:21:00 -0600 MDT | 2017-07-19 09:21:00 +0000 UTC | 66 | | ParseLocal | time.Local = time.UTC | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 67 | +------------+---------------------------+-------------------------------+-------------------------------+ 68 | 69 | 70 | # pass in a wrong timezone "MST" (should be MDT) 71 | $ dateparse --timezone="America/Denver" "Monday, 19-Jul-17 03:21:00 MST" 72 | 73 | Your Current time.Local zone is PDT 74 | 75 | +------------+---------------------------+-------------------------------+-------------------------------+ 76 | | method | Zone Source | Parsed | Parsed: t.In(time.UTC) | 77 | +------------+---------------------------+-------------------------------+-------------------------------+ 78 | | ParseAny | time.Local = nil | 2017-07-19 03:21:00 +0000 MST | 2017-07-19 03:21:00 +0000 UTC | 79 | | ParseAny | time.Local = timezone arg | 2017-07-19 04:21:00 -0600 MDT | 2017-07-19 10:21:00 +0000 UTC | 80 | | 
ParseAny | time.Local = time.UTC | 2017-07-19 03:21:00 +0000 MST | 2017-07-19 03:21:00 +0000 UTC | 81 | | ParseIn | time.Local = nil | 2017-07-19 03:21:00 +0000 MST | 2017-07-19 03:21:00 +0000 UTC | 82 | | ParseIn | time.Local = timezone arg | 2017-07-19 04:21:00 -0600 MDT | 2017-07-19 10:21:00 +0000 UTC | 83 | | ParseIn | time.Local = time.UTC | 2017-07-19 03:21:00 +0000 MST | 2017-07-19 03:21:00 +0000 UTC | 84 | | ParseLocal | time.Local = nil | 2017-07-19 03:21:00 +0000 MST | 2017-07-19 03:21:00 +0000 UTC | 85 | | ParseLocal | time.Local = timezone arg | 2017-07-19 04:21:00 -0600 MDT | 2017-07-19 10:21:00 +0000 UTC | 86 | | ParseLocal | time.Local = time.UTC | 2017-07-19 03:21:00 +0000 MST | 2017-07-19 03:21:00 +0000 UTC | 87 | +------------+---------------------------+-------------------------------+-------------------------------+ 88 | 89 | 90 | # note, we are using America/New_York which doesn't recognize MDT so essentially ignores it 91 | $ dateparse --timezone="America/New_York" "Monday, 19-Jul-17 03:21:00 MDT" 92 | 93 | Your Current time.Local zone is PDT 94 | 95 | +------------+---------------------------+-------------------------------+-------------------------------+ 96 | | method | Zone Source | Parsed | Parsed: t.In(time.UTC) | 97 | +------------+---------------------------+-------------------------------+-------------------------------+ 98 | | ParseAny | time.Local = nil | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 99 | | ParseAny | time.Local = timezone arg | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 100 | | ParseAny | time.Local = time.UTC | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 101 | | ParseIn | time.Local = nil | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 102 | | ParseIn | time.Local = timezone arg | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 103 | | ParseIn | time.Local = time.UTC | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 104 | | ParseLocal | time.Local = nil | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 105 | | ParseLocal | time.Local = timezone arg | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 106 | | ParseLocal | time.Local = time.UTC | 2017-07-19 03:21:00 +0000 MDT | 2017-07-19 03:21:00 +0000 UTC | 107 | +------------+---------------------------+-------------------------------+-------------------------------+ 108 | 109 | 110 | ``` -------------------------------------------------------------------------------- /example/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "time" 7 | 8 | "github.com/apcera/termtables" 9 | "github.com/noaway/dateparse" 10 | ) 11 | 12 | // _ _ 13 | // | | | | 14 | // __| | __ _ | |_ ___ _ __ __ _ _ __ ___ ___ 15 | // / _` | / _` | | __| / _ \ | '_ \ / _` | | '__| / __| / _ \ 16 | // | (_| | | (_| | | |_ | __/ | |_) | | (_| | | | \__ \ | __/ 17 | // \__,_| \__,_| \__| \___| | .__/ \__,_| |_| |___/ \___| 18 | // | | 19 | // |_| 20 | 21 | var examples = []string{ 22 | "May 8, 2009 5:57:51 PM", 23 | "Nov 8, 2017", 24 | "Mon Jan 2 15:04:05 2006", 25 | "Mon Jan 2 15:04:05 MST 2006", 26 | "Mon Jan 02 15:04:05 -0700 2006", 27 | "Monday, 02-Jan-06 15:04:05 MST", 28 | "Mon, 02 Jan 2006 15:04:05 MST", 29 | "Tue, 11 Jul 2017 16:28:13 +0200 (CEST)", 30 | "Mon, 02 Jan 2006 15:04:05 -0700", 31 | "Mon Aug 10 15:44:11 UTC+0100 2015", 32 | "Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight 
Time)", 33 | "12 Feb 2006, 19:17", 34 | "2013-Feb-03", 35 | // mm/dd/yy 36 | "3/31/2014", 37 | "03/31/2014", 38 | "08/21/71", 39 | "8/1/71", 40 | "4/8/2014 22:05", 41 | "04/08/2014 22:05", 42 | "04/2/2014 03:00:51", 43 | "8/8/1965 12:00:00 AM", 44 | "8/8/1965 01:00:01 PM", 45 | "8/8/1965 01:00 PM", 46 | "8/8/1965 1:00 PM", 47 | "8/8/1965 12:00 AM", 48 | "4/02/2014 03:00:51", 49 | "03/19/2012 10:11:59", 50 | "03/19/2012 10:11:59.3186369", 51 | // yyyy/mm/dd 52 | "2014/3/31", 53 | "2014/03/31", 54 | "2014/4/8 22:05", 55 | "2014/04/08 22:05", 56 | "2014/04/2 03:00:51", 57 | "2014/4/02 03:00:51", 58 | "2012/03/19 10:11:59", 59 | "2012/03/19 10:11:59.3186369", 60 | // yyyy-mm-ddThh 61 | "2006-01-02T15:04:05+0000", 62 | "2009-08-12T22:15:09-07:00", 63 | "2009-08-12T22:15:09", 64 | "2009-08-12T22:15:09Z", 65 | // yyyy-mm-dd hh:mm:ss 66 | "2014-04-26 17:24:37.3186369", 67 | "2012-08-03 18:31:59.257000000", 68 | "2014-04-26 17:24:37.123", 69 | "2013-04-01 22:43:22", 70 | "2014-12-16 06:20:00 UTC", 71 | "2014-12-16 06:20:00 GMT", 72 | "2014-04-26 05:24:37 PM", 73 | "2014-04-26 13:13:43 +0800", 74 | "2014-04-26 13:13:44 +09:00", 75 | "2012-08-03 18:31:59.257000000 +0000 UTC", 76 | "2015-09-30 18:48:56.35272715 +0000 UTC", 77 | "2015-02-18 00:12:00 +0000 GMT", 78 | "2015-02-18 00:12:00 +0000 UTC", 79 | "2017-07-19 03:21:51+00:00", 80 | "2014-04-26", 81 | "2014-04", 82 | "2014", 83 | "2014-05-11 08:20:13,787", 84 | // yyyymmdd and similar 85 | "20140601", 86 | // unix seconds, ms 87 | "1332151919", 88 | "1384216367189", 89 | // Chinese date 90 | "2017年11月09日", 91 | "2017年11月01日 09:41", 92 | // How long ago 93 | "1 day ago", 94 | "19 hours ago", 95 | "26 minutes ago", 96 | } 97 | 98 | var ( 99 | timezone = "" 100 | ) 101 | 102 | func main() { 103 | flag.StringVar(&timezone, "timezone", "UTC", "Timezone aka `America/Los_Angeles` formatted time-zone") 104 | flag.Parse() 105 | 106 | if timezone != "" { 107 | // NOTE: This is very, very important to understand 108 | // time-parsing in go 109 | loc, err := time.LoadLocation(timezone) 110 | if err != nil { 111 | panic(err.Error()) 112 | } 113 | time.Local = loc 114 | } 115 | 116 | table := termtables.CreateTable() 117 | 118 | table.AddHeaders("Input", "Parsed, and Output as %v") 119 | for _, dateExample := range examples { 120 | // t, err := dateparse.ParseLocal(dateExample) 121 | t, err := dateparse.ParseAny(dateExample) 122 | if err != nil { 123 | panic(err.Error()) 124 | } 125 | table.AddRow(dateExample, fmt.Sprintf("%v", t)) 126 | } 127 | fmt.Println(table.Render()) 128 | } 129 | 130 | /* 131 | +-------------------------------------------------------+------------------------------------------------------------+ 132 | | Input | Parsed, and Output as %v | 133 | +-------------------------------------------------------+------------------------------------------------------------+ 134 | | May 8, 2009 5:57:51 PM | 2009-05-08 17:57:51 +0000 UTC | 135 | | Nov 8, 2017 | 2017-11-08 00:00:00 +0000 UTC | 136 | | Mon Jan 2 15:04:05 2006 | 2006-01-02 15:04:05 +0000 UTC | 137 | | Mon Jan 2 15:04:05 MST 2006 | 2006-01-02 15:04:05 +0000 MST | 138 | | Mon Jan 02 15:04:05 -0700 2006 | 2006-01-02 15:04:05 -0700 -0700 | 139 | | Monday, 02-Jan-06 15:04:05 MST | 2006-01-02 15:04:05 +0000 MST | 140 | | Mon, 02 Jan 2006 15:04:05 MST | 2006-01-02 15:04:05 +0000 MST | 141 | | Tue, 11 Jul 2017 16:28:13 +0200 (CEST) | 2017-07-11 16:28:13 +0200 +0200 | 142 | | Mon, 02 Jan 2006 15:04:05 -0700 | 2006-01-02 15:04:05 -0700 -0700 | 143 | | Mon Aug 10 15:44:11 UTC+0100 2015 | 2015-08-10 
15:44:11 +0000 UTC | 144 | | Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) | 2015-07-03 18:04:07 +0100 GMT | 145 | | 12 Feb 2006, 19:17 | 2006-02-12 19:17:00 +0000 UTC | 146 | | 2013-Feb-03 | 2013-02-03 00:00:00 +0000 UTC | 147 | | 3/31/2014 | 2014-03-31 00:00:00 +0000 UTC | 148 | | 03/31/2014 | 2014-03-31 00:00:00 +0000 UTC | 149 | | 08/21/71 | 1971-08-21 00:00:00 +0000 UTC | 150 | | 8/1/71 | 1971-08-01 00:00:00 +0000 UTC | 151 | | 4/8/2014 22:05 | 2014-04-08 22:05:00 +0000 UTC | 152 | | 04/08/2014 22:05 | 2014-04-08 22:05:00 +0000 UTC | 153 | | 04/2/2014 03:00:51 | 2014-04-02 03:00:51 +0000 UTC | 154 | | 8/8/1965 12:00:00 AM | 1965-08-08 00:00:00 +0000 UTC | 155 | | 8/8/1965 01:00:01 PM | 1965-08-08 13:00:01 +0000 UTC | 156 | | 8/8/1965 01:00 PM | 1965-08-08 13:00:00 +0000 UTC | 157 | | 8/8/1965 1:00 PM | 1965-08-08 13:00:00 +0000 UTC | 158 | | 8/8/1965 12:00 AM | 1965-08-08 00:00:00 +0000 UTC | 159 | | 4/02/2014 03:00:51 | 2014-04-02 03:00:51 +0000 UTC | 160 | | 03/19/2012 10:11:59 | 2012-03-19 10:11:59 +0000 UTC | 161 | | 03/19/2012 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC | 162 | | 2014/3/31 | 2014-03-31 00:00:00 +0000 UTC | 163 | | 2014/03/31 | 2014-03-31 00:00:00 +0000 UTC | 164 | | 2014/4/8 22:05 | 2014-04-08 22:05:00 +0000 UTC | 165 | | 2014/04/08 22:05 | 2014-04-08 22:05:00 +0000 UTC | 166 | | 2014/04/2 03:00:51 | 2014-04-02 03:00:51 +0000 UTC | 167 | | 2014/4/02 03:00:51 | 2014-04-02 03:00:51 +0000 UTC | 168 | | 2012/03/19 10:11:59 | 2012-03-19 10:11:59 +0000 UTC | 169 | | 2012/03/19 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC | 170 | | 2006-01-02T15:04:05+0000 | 2006-01-02 15:04:05 +0000 UTC | 171 | | 2009-08-12T22:15:09-07:00 | 2009-08-12 22:15:09 -0700 -0700 | 172 | | 2009-08-12T22:15:09 | 2009-08-12 22:15:09 +0000 UTC | 173 | | 2009-08-12T22:15:09Z | 2009-08-12 22:15:09 +0000 UTC | 174 | | 2014-04-26 17:24:37.3186369 | 2014-04-26 17:24:37.3186369 +0000 UTC | 175 | | 2012-08-03 18:31:59.257000000 | 2012-08-03 18:31:59.257 +0000 UTC | 176 | | 2014-04-26 17:24:37.123 | 2014-04-26 17:24:37.123 +0000 UTC | 177 | | 2013-04-01 22:43:22 | 2013-04-01 22:43:22 +0000 UTC | 178 | | 2014-12-16 06:20:00 UTC | 2014-12-16 06:20:00 +0000 UTC | 179 | | 2014-12-16 06:20:00 GMT | 2014-12-16 06:20:00 +0000 UTC | 180 | | 2014-04-26 05:24:37 PM | 2014-04-26 17:24:37 +0000 UTC | 181 | | 2014-04-26 13:13:43 +0800 | 2014-04-26 13:13:43 +0800 +0800 | 182 | | 2014-04-26 13:13:44 +09:00 | 2014-04-26 13:13:44 +0900 +0900 | 183 | | 2012-08-03 18:31:59.257000000 +0000 UTC | 2012-08-03 18:31:59.257 +0000 UTC | 184 | | 2015-09-30 18:48:56.35272715 +0000 UTC | 2015-09-30 18:48:56.35272715 +0000 UTC | 185 | | 2015-02-18 00:12:00 +0000 GMT | 2015-02-18 00:12:00 +0000 UTC | 186 | | 2015-02-18 00:12:00 +0000 UTC | 2015-02-18 00:12:00 +0000 UTC | 187 | | 2017-07-19 03:21:51+00:00 | 2017-07-19 03:21:51 +0000 UTC | 188 | | 2014-04-26 | 2014-04-26 00:00:00 +0000 UTC | 189 | | 2014-04 | 2014-04-01 00:00:00 +0000 UTC | 190 | | 2014 | 2014-01-01 00:00:00 +0000 UTC | 191 | | 2014-05-11 08:20:13,787 | 2014-05-11 08:20:13.787 +0000 UTC | 192 | | 20140601 | 2014-06-01 00:00:00 +0000 UTC | 193 | | 1332151919 | 2012-03-19 10:11:59 +0000 UTC | 194 | | 1384216367189 | 2013-11-12 00:32:47.189 +0000 UTC | 195 | | 2017年11月09日 | 2017-11-09 00:00:00 +0000 UTC | 196 | | 2017年11月01日 09:41 | 2017-11-01 09:41:00 +0000 UTC | 197 | | 1 day ago | 2017-11-08 11:07:30.982877633 +0000 UTC m=-86399.999181941 | 198 | | 19 hours ago | 2017-11-08 16:07:30.982880364 +0000 UTC m=-68399.999179210 | 199 | | 26 
minutes ago | 2017-11-09 10:41:30.982884592 +0000 UTC m=-1559.999174982 | 200 | +-------------------------------------------------------+------------------------------------------------------------+ 201 | */ 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Go Date Parser 2 | --------------------------- 3 | 4 | Parse any date string without knowing format in advance. Uses a scanner to read bytes and use a state machine to find format. Much faster than shotgun based parse methods. See [bench_test.go](https://github.com/noaway/dateparse/blob/master/bench_test.go) for performance comparison. 5 | 6 | 7 | 8 | **Timezones** The location your server is configured effects the results! See example or https://play.golang.org/p/IDHRalIyXh and last paragraph here https://golang.org/pkg/time/#Parse. 9 | 10 | 11 | ```go 12 | 13 | // Normal parse. Equivalent Timezone rules as time.Parse() 14 | t, err := dateparse.ParseAny("3/1/2014") 15 | 16 | // Parse with Location, equivalent to time.ParseInLocation() timezone/offset 17 | // rules. Using location arg, if timezone/offset info exists in the 18 | // datestring, it uses the given location rules for any zone interpretation. 19 | // That is, MST means one thing when using America/Denver and something else 20 | // in other locations. 21 | denverLoc, _ := time.LoadLocation("America/Denver") 22 | t, err := dateparse.ParseIn("3/1/2014", denverLoc) 23 | 24 | // Set Location to time.Local. Same as ParseIn Location but lazily uses 25 | // the global time.Local variable for Location argument. 26 | denverLoc, _ := time.LoadLocation("America/Denver") 27 | // use time.Local global variable to store location 28 | time.Local = denverLoc 29 | t, err := dateparse.ParseLocal("3/1/2014") 30 | // Equivalent to 31 | t, err := dateparse.ParseIn("3/1/2014", time.Local) 32 | 33 | ``` 34 | 35 | cli tool for testing dateformats 36 | ---------------------------------- 37 | 38 | [Date Parse CLI](https://github.com/noaway/dateparse/blob/master/dateparse) 39 | 40 | 41 | Extended example 42 | ------------------- 43 | 44 | https://github.com/noaway/dateparse/blob/master/example/main.go 45 | 46 | ```go 47 | package main 48 | 49 | import ( 50 | "flag" 51 | "fmt" 52 | "time" 53 | 54 | "github.com/apcera/termtables" 55 | "github.com/noaway/dateparse" 56 | ) 57 | 58 | // _ _ 59 | // | | | | 60 | // __| | __ _ | |_ ___ _ __ __ _ _ __ ___ ___ 61 | // / _` | / _` | | __| / _ \ | '_ \ / _` | | '__| / __| / _ \ 62 | // | (_| | | (_| | | |_ | __/ | |_) | | (_| | | | \__ \ | __/ 63 | // \__,_| \__,_| \__| \___| | .__/ \__,_| |_| |___/ \___| 64 | // | | 65 | // |_| 66 | 67 | var examples = []string{ 68 | "May 8, 2009 5:57:51 PM", 69 | "Nov 8, 2017", 70 | "Mon Jan 2 15:04:05 2006", 71 | "Mon Jan 2 15:04:05 MST 2006", 72 | "Mon Jan 02 15:04:05 -0700 2006", 73 | "Monday, 02-Jan-06 15:04:05 MST", 74 | "Mon, 02 Jan 2006 15:04:05 MST", 75 | "Tue, 11 Jul 2017 16:28:13 +0200 (CEST)", 76 | "Mon, 02 Jan 2006 15:04:05 -0700", 77 | "Mon Aug 10 15:44:11 UTC+0100 2015", 78 | "Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time)", 79 | "12 Feb 2006, 19:17", 80 | "2013-Feb-03", 81 | // mm/dd/yy 82 | "3/31/2014", 83 | "03/31/2014", 84 | "08/21/71", 85 | "8/1/71", 86 | "4/8/2014 22:05", 87 | "04/08/2014 22:05", 88 | "04/2/2014 03:00:51", 89 | "8/8/1965 12:00:00 AM", 90 | "8/8/1965 01:00:01 PM", 91 | "8/8/1965 01:00 PM", 92 | "8/8/1965 1:00 PM", 93 | "8/8/1965 12:00 AM", 94 | "4/02/2014 
03:00:51", 95 | "03/19/2012 10:11:59", 96 | "03/19/2012 10:11:59.3186369", 97 | // yyyy/mm/dd 98 | "2014/3/31", 99 | "2014/03/31", 100 | "2014/4/8 22:05", 101 | "2014/04/08 22:05", 102 | "2014/04/2 03:00:51", 103 | "2014/4/02 03:00:51", 104 | "2012/03/19 10:11:59", 105 | "2012/03/19 10:11:59.3186369", 106 | // yyyy-mm-ddThh 107 | "2006-01-02T15:04:05+0000", 108 | "2009-08-12T22:15:09-07:00", 109 | "2009-08-12T22:15:09", 110 | "2009-08-12T22:15:09Z", 111 | // yyyy-mm-dd hh:mm:ss 112 | "2014-04-26 17:24:37.3186369", 113 | "2012-08-03 18:31:59.257000000", 114 | "2014-04-26 17:24:37.123", 115 | "2013-04-01 22:43:22", 116 | "2014-12-16 06:20:00 UTC", 117 | "2014-12-16 06:20:00 GMT", 118 | "2014-04-26 05:24:37 PM", 119 | "2014-04-26 13:13:43 +0800", 120 | "2014-04-26 13:13:44 +09:00", 121 | "2012-08-03 18:31:59.257000000 +0000 UTC", 122 | "2015-09-30 18:48:56.35272715 +0000 UTC", 123 | "2015-02-18 00:12:00 +0000 GMT", 124 | "2015-02-18 00:12:00 +0000 UTC", 125 | "2017-07-19 03:21:51+00:00", 126 | "2014-04-26", 127 | "2014-04", 128 | "2014", 129 | "2014-05-11 08:20:13,787", 130 | // yyyymmdd and similar 131 | "20140601", 132 | // unix seconds, ms 133 | "1332151919", 134 | "1384216367189", 135 | // Chinese date 136 | "2017年11月09日", 137 | "2017年11月01日 09:41", 138 | // How long ago 139 | "1 day ago", 140 | "19 hours ago", 141 | "26 minutes ago", 142 | } 143 | 144 | var ( 145 | timezone = "" 146 | ) 147 | 148 | func main() { 149 | flag.StringVar(&timezone, "timezone", "UTC", "Timezone aka `America/Los_Angeles` formatted time-zone") 150 | flag.Parse() 151 | 152 | if timezone != "" { 153 | // NOTE: This is very, very important to understand 154 | // time-parsing in go 155 | loc, err := time.LoadLocation(timezone) 156 | if err != nil { 157 | panic(err.Error()) 158 | } 159 | time.Local = loc 160 | } 161 | 162 | table := termtables.CreateTable() 163 | 164 | table.AddHeaders("Input", "Parsed, and Output as %v") 165 | for _, dateExample := range examples { 166 | // t, err := dateparse.ParseLocal(dateExample) 167 | t, err := dateparse.ParseAny(dateExample) 168 | if err != nil { 169 | panic(err.Error()) 170 | } 171 | table.AddRow(dateExample, fmt.Sprintf("%v", t)) 172 | } 173 | fmt.Println(table.Render()) 174 | } 175 | 176 | /* 177 | +-------------------------------------------------------+------------------------------------------------------------+ 178 | | Input | Parsed, and Output as %v | 179 | +-------------------------------------------------------+------------------------------------------------------------+ 180 | | May 8, 2009 5:57:51 PM | 2009-05-08 17:57:51 +0000 UTC | 181 | | Nov 8, 2017 | 2017-11-08 00:00:00 +0000 UTC | 182 | | Mon Jan 2 15:04:05 2006 | 2006-01-02 15:04:05 +0000 UTC | 183 | | Mon Jan 2 15:04:05 MST 2006 | 2006-01-02 15:04:05 +0000 MST | 184 | | Mon Jan 02 15:04:05 -0700 2006 | 2006-01-02 15:04:05 -0700 -0700 | 185 | | Monday, 02-Jan-06 15:04:05 MST | 2006-01-02 15:04:05 +0000 MST | 186 | | Mon, 02 Jan 2006 15:04:05 MST | 2006-01-02 15:04:05 +0000 MST | 187 | | Tue, 11 Jul 2017 16:28:13 +0200 (CEST) | 2017-07-11 16:28:13 +0200 +0200 | 188 | | Mon, 02 Jan 2006 15:04:05 -0700 | 2006-01-02 15:04:05 -0700 -0700 | 189 | | Mon Aug 10 15:44:11 UTC+0100 2015 | 2015-08-10 15:44:11 +0000 UTC | 190 | | Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) | 2015-07-03 18:04:07 +0100 GMT | 191 | | 12 Feb 2006, 19:17 | 2006-02-12 19:17:00 +0000 UTC | 192 | | 2013-Feb-03 | 2013-02-03 00:00:00 +0000 UTC | 193 | | 3/31/2014 | 2014-03-31 00:00:00 +0000 UTC | 194 | | 03/31/2014 | 2014-03-31 00:00:00 
+0000 UTC | 195 | | 08/21/71 | 1971-08-21 00:00:00 +0000 UTC | 196 | | 8/1/71 | 1971-08-01 00:00:00 +0000 UTC | 197 | | 4/8/2014 22:05 | 2014-04-08 22:05:00 +0000 UTC | 198 | | 04/08/2014 22:05 | 2014-04-08 22:05:00 +0000 UTC | 199 | | 04/2/2014 03:00:51 | 2014-04-02 03:00:51 +0000 UTC | 200 | | 8/8/1965 12:00:00 AM | 1965-08-08 00:00:00 +0000 UTC | 201 | | 8/8/1965 01:00:01 PM | 1965-08-08 13:00:01 +0000 UTC | 202 | | 8/8/1965 01:00 PM | 1965-08-08 13:00:00 +0000 UTC | 203 | | 8/8/1965 1:00 PM | 1965-08-08 13:00:00 +0000 UTC | 204 | | 8/8/1965 12:00 AM | 1965-08-08 00:00:00 +0000 UTC | 205 | | 4/02/2014 03:00:51 | 2014-04-02 03:00:51 +0000 UTC | 206 | | 03/19/2012 10:11:59 | 2012-03-19 10:11:59 +0000 UTC | 207 | | 03/19/2012 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC | 208 | | 2014/3/31 | 2014-03-31 00:00:00 +0000 UTC | 209 | | 2014/03/31 | 2014-03-31 00:00:00 +0000 UTC | 210 | | 2014/4/8 22:05 | 2014-04-08 22:05:00 +0000 UTC | 211 | | 2014/04/08 22:05 | 2014-04-08 22:05:00 +0000 UTC | 212 | | 2014/04/2 03:00:51 | 2014-04-02 03:00:51 +0000 UTC | 213 | | 2014/4/02 03:00:51 | 2014-04-02 03:00:51 +0000 UTC | 214 | | 2012/03/19 10:11:59 | 2012-03-19 10:11:59 +0000 UTC | 215 | | 2012/03/19 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC | 216 | | 2006-01-02T15:04:05+0000 | 2006-01-02 15:04:05 +0000 UTC | 217 | | 2009-08-12T22:15:09-07:00 | 2009-08-12 22:15:09 -0700 -0700 | 218 | | 2009-08-12T22:15:09 | 2009-08-12 22:15:09 +0000 UTC | 219 | | 2009-08-12T22:15:09Z | 2009-08-12 22:15:09 +0000 UTC | 220 | | 2014-04-26 17:24:37.3186369 | 2014-04-26 17:24:37.3186369 +0000 UTC | 221 | | 2012-08-03 18:31:59.257000000 | 2012-08-03 18:31:59.257 +0000 UTC | 222 | | 2014-04-26 17:24:37.123 | 2014-04-26 17:24:37.123 +0000 UTC | 223 | | 2013-04-01 22:43:22 | 2013-04-01 22:43:22 +0000 UTC | 224 | | 2014-12-16 06:20:00 UTC | 2014-12-16 06:20:00 +0000 UTC | 225 | | 2014-12-16 06:20:00 GMT | 2014-12-16 06:20:00 +0000 UTC | 226 | | 2014-04-26 05:24:37 PM | 2014-04-26 17:24:37 +0000 UTC | 227 | | 2014-04-26 13:13:43 +0800 | 2014-04-26 13:13:43 +0800 +0800 | 228 | | 2014-04-26 13:13:44 +09:00 | 2014-04-26 13:13:44 +0900 +0900 | 229 | | 2012-08-03 18:31:59.257000000 +0000 UTC | 2012-08-03 18:31:59.257 +0000 UTC | 230 | | 2015-09-30 18:48:56.35272715 +0000 UTC | 2015-09-30 18:48:56.35272715 +0000 UTC | 231 | | 2015-02-18 00:12:00 +0000 GMT | 2015-02-18 00:12:00 +0000 UTC | 232 | | 2015-02-18 00:12:00 +0000 UTC | 2015-02-18 00:12:00 +0000 UTC | 233 | | 2017-07-19 03:21:51+00:00 | 2017-07-19 03:21:51 +0000 UTC | 234 | | 2014-04-26 | 2014-04-26 00:00:00 +0000 UTC | 235 | | 2014-04 | 2014-04-01 00:00:00 +0000 UTC | 236 | | 2014 | 2014-01-01 00:00:00 +0000 UTC | 237 | | 2014-05-11 08:20:13,787 | 2014-05-11 08:20:13.787 +0000 UTC | 238 | | 20140601 | 2014-06-01 00:00:00 +0000 UTC | 239 | | 1332151919 | 2012-03-19 10:11:59 +0000 UTC | 240 | | 1384216367189 | 2013-11-12 00:32:47.189 +0000 UTC | 241 | | 2017年11月09日 | 2017-11-09 00:00:00 +0000 UTC | 242 | | 2017年11月01日 09:41 | 2017-11-01 09:41:00 +0000 UTC | 243 | | 1 day ago | 2017-11-08 11:07:30.982877633 +0000 UTC m=-86399.999181941 | 244 | | 19 hours ago | 2017-11-08 16:07:30.982880364 +0000 UTC m=-68399.999179210 | 245 | | 26 minutes ago | 2017-11-09 10:41:30.982884592 +0000 UTC m=-1559.999174982 | 246 | +-------------------------------------------------------+------------------------------------------------------------+ 247 | */ 248 | 249 | 250 | ``` 251 | -------------------------------------------------------------------------------- 
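A minimal sketch of the three entry points documented in the README above, assuming the `github.com/noaway/dateparse` import path used throughout this repo; with no zone information in the datestring, each function applies different zone rules:

```go
package main

import (
	"fmt"
	"time"

	"github.com/noaway/dateparse"
)

func main() {
	denver, err := time.LoadLocation("America/Denver")
	if err != nil {
		panic(err)
	}

	// No timezone/offset in the input, so the zone comes from the method used.
	datestr := "2017-07-19 03:21:51"

	// ParseAny: same zone rules as time.Parse, so a zone-less string is read as UTC.
	t1, _ := dateparse.ParseAny(datestr)

	// ParseIn: same rules as time.ParseInLocation, so the Denver rules are applied.
	t2, _ := dateparse.ParseIn(datestr, denver)

	// ParseLocal: like ParseIn, but it reads the global time.Local instead of an argument.
	time.Local = denver
	t3, _ := dateparse.ParseLocal(datestr)

	fmt.Println(t1) // 2017-07-19 03:21:51 +0000 UTC
	fmt.Println(t2) // 2017-07-19 03:21:51 -0600 MDT
	fmt.Println(t3) // 2017-07-19 03:21:51 -0600 MDT
}
```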
/parseany_test.go: -------------------------------------------------------------------------------- 1 | package dateparse 2 | 3 | import ( 4 | "fmt" 5 | "strconv" 6 | "strings" 7 | "time" 8 | "unicode" 9 | ) 10 | 11 | type dateState int 12 | 13 | const ( 14 | stateStart dateState = iota 15 | stateDigit 16 | stateDigitDash 17 | stateDigitDashAlpha 18 | stateDigitDashWs 19 | stateDigitDashWsWs 20 | stateDigitDashWsWsAMPMMaybe 21 | stateDigitDashWsWsOffset 22 | stateDigitDashWsWsOffsetAlpha 23 | stateDigitDashWsWsOffsetColonAlpha 24 | stateDigitDashWsWsOffsetColon 25 | stateDigitDashWsOffset 26 | stateDigitDashWsWsAlpha 27 | stateDigitDashWsPeriod 28 | stateDigitDashWsPeriodAlpha 29 | stateDigitDashWsPeriodOffset 30 | stateDigitDashWsPeriodOffsetAlpha 31 | stateDigitDashT 32 | stateDigitDashTZ 33 | stateDigitDashTZDigit 34 | stateDigitDashTOffset 35 | stateDigitDashTOffsetColon 36 | stateDigitSlash 37 | stateDigitSlashWS 38 | stateDigitSlashWSColon 39 | stateDigitSlashWSColonAMPM 40 | stateDigitSlashWSColonColon 41 | stateDigitSlashWSColonColonAMPM 42 | stateDigitAlpha 43 | stateAlpha 44 | stateAlphaWS 45 | stateAlphaWSDigitComma 46 | stateAlphaWSAlpha 47 | stateAlphaWSAlphaColon 48 | stateAlphaWSAlphaColonOffset 49 | stateAlphaWSAlphaColonAlpha 50 | stateAlphaWSAlphaColonAlphaOffset 51 | stateAlphaWSAlphaColonAlphaOffsetAlpha 52 | stateWeekdayComma 53 | stateWeekdayCommaOffset 54 | stateWeekdayAbbrevComma 55 | stateWeekdayAbbrevCommaOffset 56 | stateWeekdayAbbrevCommaOffsetZone 57 | stateHowLongAgo 58 | ) 59 | 60 | const ( 61 | Day = time.Hour * 24 62 | ) 63 | 64 | var ( 65 | shortDates = []string{"01/02/2006", "1/2/2006", "06/01/02", "01/02/06", "1/2/06"} 66 | ) 67 | 68 | // ParseAny parse an unknown date format, detect the layout, parse. 69 | // Normal parse. Equivalent Timezone rules as time.Parse() 70 | func ParseAny(datestr string) (time.Time, error) { 71 | return parseTime(datestr, nil) 72 | } 73 | 74 | // ParseIn with Location, equivalent to time.ParseInLocation() timezone/offset 75 | // rules. Using location arg, if timezone/offset info exists in the 76 | // datestring, it uses the given location rules for any zone interpretation. 77 | // That is, MST means one thing when using America/Denver and something else 78 | // in other locations. 79 | func ParseIn(datestr string, loc *time.Location) (time.Time, error) { 80 | return parseTime(datestr, loc) 81 | } 82 | 83 | // ParseLocal Given an unknown date format, detect the layout, 84 | // using time.Local, parse. 85 | // 86 | // Set Location to time.Local. Same as ParseIn Location but lazily uses 87 | // the global time.Local variable for Location argument. 88 | // 89 | // denverLoc, _ := time.LoadLocation("America/Denver") 90 | // time.Local = denverLoc 91 | // 92 | // t, err := dateparse.ParseLocal("3/1/2014") 93 | // 94 | // Equivalent to: 95 | // 96 | // t, err := dateparse.ParseIn("3/1/2014", denverLoc) 97 | // 98 | func ParseLocal(datestr string) (time.Time, error) { 99 | return parseTime(datestr, time.Local) 100 | } 101 | 102 | // MustParse parse a date, and panic if it can't be parsed. Used for testing. 103 | // Not recommended for most use-cases. 
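//
// A short illustrative sketch:
//
//	ts := dateparse.MustParse("2017-07-19 03:21:51")
//
// This is convenient in tests and fixtures, but any unparseable input panics,
// so prefer ParseAny (which returns an error) in most code.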
104 | func MustParse(datestr string) time.Time { 105 | t, err := parseTime(datestr, nil) 106 | if err != nil { 107 | panic(err.Error()) 108 | } 109 | return t 110 | } 111 | 112 | func parse(layout, datestr string, loc *time.Location) (time.Time, error) { 113 | if loc == nil { 114 | return time.Parse(layout, datestr) 115 | } 116 | return time.ParseInLocation(layout, datestr, loc) 117 | } 118 | 119 | func parseTime(datestr string, loc *time.Location) (time.Time, error) { 120 | state := stateStart 121 | 122 | firstSlash := 0 123 | 124 | // General strategy is to read rune by rune through the date looking for 125 | // certain hints of what type of date we are dealing with. 126 | // Hopefully we only need to read about 5 or 6 bytes before 127 | // we figure it out and then attempt a parse 128 | iterRunes: 129 | for i := 0; i < len(datestr); i++ { 130 | r := rune(datestr[i]) 131 | // r, bytesConsumed := utf8.DecodeRuneInString(datestr[ri:]) 132 | // if bytesConsumed > 1 { 133 | // ri += (bytesConsumed - 1) 134 | // } 135 | 136 | switch state { 137 | case stateStart: 138 | if unicode.IsDigit(r) { 139 | state = stateDigit 140 | } else if unicode.IsLetter(r) { 141 | state = stateAlpha 142 | } 143 | case stateDigit: // starts digits 144 | if unicode.IsDigit(r) { 145 | continue 146 | } else if unicode.IsLetter(r) { 147 | state = stateDigitAlpha 148 | continue 149 | } 150 | switch r { 151 | case '-', '\u2212': 152 | state = stateDigitDash 153 | case '/': 154 | state = stateDigitSlash 155 | firstSlash = i 156 | case ' ': 157 | state = stateHowLongAgo 158 | } 159 | case stateDigitDash: // starts digit then dash 02- 160 | // 2006-01-02T15:04:05Z07:00 161 | // 2017-06-25T17:46:57.45706582-07:00 162 | // 2006-01-02T15:04:05.999999999Z07:00 163 | // 2006-01-02T15:04:05+0000 164 | // 2012-08-03 18:31:59.257000000 165 | // 2014-04-26 17:24:37.3186369 166 | // 2017-01-27 00:07:31.945167 167 | // 2016-03-14 00:00:00.000 168 | // 2014-05-11 08:20:13,787 169 | // 2017-07-19 03:21:51+00:00 170 | // 2006-01-02 171 | // 2013-04-01 22:43:22 172 | // 2014-04-26 05:24:37 PM 173 | // 2013-Feb-03 174 | switch { 175 | case r == ' ': 176 | state = stateDigitDashWs 177 | case r == 'T': 178 | state = stateDigitDashT 179 | default: 180 | if unicode.IsLetter(r) { 181 | state = stateDigitDashAlpha 182 | break iterRunes 183 | } 184 | } 185 | case stateDigitDashWs: 186 | // 2013-04-01 22:43:22 187 | // 2014-05-11 08:20:13,787 188 | // stateDigitDashWsWs 189 | // 2014-04-26 05:24:37 PM 190 | // 2014-12-16 06:20:00 UTC 191 | // 2015-02-18 00:12:00 +0000 UTC 192 | // 2006-01-02 15:04:05 -0700 193 | // 2006-01-02 15:04:05 -07:00 194 | // stateDigitDashWsOffset 195 | // 2017-07-19 03:21:51+00:00 196 | // stateDigitDashWsPeriod 197 | // 2014-04-26 17:24:37.3186369 198 | // 2017-01-27 00:07:31.945167 199 | // 2012-08-03 18:31:59.257000000 200 | // 2016-03-14 00:00:00.000 201 | // stateDigitDashWsPeriodOffset 202 | // 2017-01-27 00:07:31.945167 +0000 203 | // 2016-03-14 00:00:00.000 +0000 204 | // stateDigitDashWsPeriodOffsetAlpha 205 | // 2017-01-27 00:07:31.945167 +0000 UTC 206 | // 2016-03-14 00:00:00.000 +0000 UTC 207 | // stateDigitDashWsPeriodAlpha 208 | // 2014-12-16 06:20:00.000 UTC 209 | switch r { 210 | case ',': 211 | if len(datestr) == len("2014-05-11 08:20:13,787") { 212 | // go doesn't seem to parse this one natively? or did i miss it? 
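// Work around that here: parse everything before the comma with a plain
// seconds-resolution layout, then read the digits after the comma as
// milliseconds and add them back on as nanoseconds (ms * 1e6).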
213 | t, err := parse("2006-01-02 03:04:05", datestr[:i], loc) 214 | if err == nil { 215 | ms, err := strconv.Atoi(datestr[i+1:]) 216 | if err == nil { 217 | return time.Unix(0, t.UnixNano()+int64(ms)*1e6), nil 218 | } 219 | } 220 | return t, err 221 | } 222 | case '-', '+': 223 | state = stateDigitDashWsOffset 224 | case '.': 225 | state = stateDigitDashWsPeriod 226 | case ' ': 227 | state = stateDigitDashWsWs 228 | } 229 | 230 | case stateDigitDashWsWs: 231 | // stateDigitDashWsWsAlpha 232 | // 2014-12-16 06:20:00 UTC 233 | // stateDigitDashWsWsAMPMMaybe 234 | // 2014-04-26 05:24:37 PM 235 | // stateDigitDashWsWsOffset 236 | // 2006-01-02 15:04:05 -0700 237 | // stateDigitDashWsWsOffsetColon 238 | // 2006-01-02 15:04:05 -07:00 239 | // stateDigitDashWsWsOffsetColonAlpha 240 | // 2015-02-18 00:12:00 +00:00 UTC 241 | // stateDigitDashWsWsOffsetAlpha 242 | // 2015-02-18 00:12:00 +0000 UTC 243 | switch r { 244 | case 'A', 'P': 245 | state = stateDigitDashWsWsAMPMMaybe 246 | case '+', '-': 247 | state = stateDigitDashWsWsOffset 248 | default: 249 | if unicode.IsLetter(r) { 250 | // 2014-12-16 06:20:00 UTC 251 | state = stateDigitDashWsWsAlpha 252 | break iterRunes 253 | } 254 | } 255 | 256 | case stateDigitDashWsWsAMPMMaybe: 257 | if r == 'M' { 258 | return parse("2006-01-02 03:04:05 PM", datestr, loc) 259 | } 260 | state = stateDigitDashWsWsAlpha 261 | 262 | case stateDigitDashWsWsOffset: 263 | // stateDigitDashWsWsOffset 264 | // 2006-01-02 15:04:05 -0700 265 | // stateDigitDashWsWsOffsetColon 266 | // 2006-01-02 15:04:05 -07:00 267 | // stateDigitDashWsWsOffsetColonAlpha 268 | // 2015-02-18 00:12:00 +00:00 UTC 269 | // stateDigitDashWsWsOffsetAlpha 270 | // 2015-02-18 00:12:00 +0000 UTC 271 | if r == ':' { 272 | state = stateDigitDashWsWsOffsetColon 273 | } else if unicode.IsLetter(r) { 274 | // 2015-02-18 00:12:00 +0000 UTC 275 | state = stateDigitDashWsWsOffsetAlpha 276 | break iterRunes 277 | } 278 | 279 | case stateDigitDashWsWsOffsetColon: 280 | // stateDigitDashWsWsOffsetColon 281 | // 2006-01-02 15:04:05 -07:00 282 | // stateDigitDashWsWsOffsetColonAlpha 283 | // 2015-02-18 00:12:00 +00:00 UTC 284 | if unicode.IsLetter(r) { 285 | // 2015-02-18 00:12:00 +00:00 UTC 286 | state = stateDigitDashWsWsOffsetColonAlpha 287 | break iterRunes 288 | } 289 | 290 | case stateDigitDashWsPeriod: 291 | // 2014-04-26 17:24:37.3186369 292 | // 2017-01-27 00:07:31.945167 293 | // 2012-08-03 18:31:59.257000000 294 | // 2016-03-14 00:00:00.000 295 | // stateDigitDashWsPeriodOffset 296 | // 2017-01-27 00:07:31.945167 +0000 297 | // 2016-03-14 00:00:00.000 +0000 298 | // stateDigitDashWsPeriodOffsetAlpha 299 | // 2017-01-27 00:07:31.945167 +0000 UTC 300 | // 2016-03-14 00:00:00.000 +0000 UTC 301 | // stateDigitDashWsPeriodAlpha 302 | // 2014-12-16 06:20:00.000 UTC 303 | if unicode.IsLetter(r) { 304 | // 2014-12-16 06:20:00.000 UTC 305 | state = stateDigitDashWsPeriodAlpha 306 | break iterRunes 307 | } else if r == '+' || r == '-' { 308 | state = stateDigitDashWsPeriodOffset 309 | } 310 | case stateDigitDashWsPeriodOffset: 311 | // 2017-01-27 00:07:31.945167 +0000 312 | // 2016-03-14 00:00:00.000 +0000 313 | // stateDigitDashWsPeriodOffsetAlpha 314 | // 2017-01-27 00:07:31.945167 +0000 UTC 315 | // 2016-03-14 00:00:00.000 +0000 UTC 316 | if unicode.IsLetter(r) { 317 | // 2014-12-16 06:20:00.000 UTC 318 | // 2017-01-27 00:07:31.945167 +0000 UTC 319 | // 2016-03-14 00:00:00.000 +0000 UTC 320 | state = stateDigitDashWsPeriodOffsetAlpha 321 | break iterRunes 322 | } 323 | case stateDigitDashT: // starts digit 
then dash 02- then T 324 | // stateDigitDashT 325 | // 2006-01-02T15:04:05 326 | // stateDigitDashTZ 327 | // 2006-01-02T15:04:05.999999999Z 328 | // 2006-01-02T15:04:05.99999999Z 329 | // 2006-01-02T15:04:05.9999999Z 330 | // 2006-01-02T15:04:05.999999Z 331 | // 2006-01-02T15:04:05.99999Z 332 | // 2006-01-02T15:04:05.9999Z 333 | // 2006-01-02T15:04:05.999Z 334 | // 2006-01-02T15:04:05.99Z 335 | // 2009-08-12T22:15Z 336 | // stateDigitDashTZDigit 337 | // 2006-01-02T15:04:05.999999999Z07:00 338 | // 2006-01-02T15:04:05Z07:00 339 | // With another dash aka time-zone at end 340 | // stateDigitDashTOffset 341 | // stateDigitDashTOffsetColon 342 | // 2017-06-25T17:46:57.45706582-07:00 343 | // 2017-06-25T17:46:57+04:00 344 | // 2006-01-02T15:04:05+0000 345 | switch r { 346 | case '-', '+': 347 | state = stateDigitDashTOffset 348 | case 'Z': 349 | state = stateDigitDashTZ 350 | } 351 | case stateDigitDashTZ: 352 | if unicode.IsDigit(r) { 353 | state = stateDigitDashTZDigit 354 | } 355 | case stateDigitDashTOffset: 356 | if r == ':' { 357 | state = stateDigitDashTOffsetColon 358 | } 359 | case stateDigitSlash: // starts digit then slash 02/ 360 | // 2014/07/10 06:55:38.156283 361 | // 03/19/2012 10:11:59 362 | // 04/2/2014 03:00:37 363 | // 3/1/2012 10:11:59 364 | // 4/8/2014 22:05 365 | // 3/1/2014 366 | // 10/13/2014 367 | // 01/02/2006 368 | // 1/2/06 369 | if unicode.IsDigit(r) || r == '/' { 370 | continue 371 | } 372 | switch r { 373 | case ' ': 374 | state = stateDigitSlashWS 375 | } 376 | case stateDigitSlashWS: // starts digit then slash 02/ more digits/slashes then whitespace 377 | // 2014/07/10 06:55:38.156283 378 | // 03/19/2012 10:11:59 379 | // 04/2/2014 03:00:37 380 | // 3/1/2012 10:11:59 381 | // 4/8/2014 22:05 382 | switch r { 383 | case ':': 384 | state = stateDigitSlashWSColon 385 | } 386 | case stateDigitSlashWSColon: // starts digit then slash 02/ more digits/slashes then whitespace 387 | // 2014/07/10 06:55:38.156283 388 | // 03/19/2012 10:11:59 389 | // 04/2/2014 03:00:37 390 | // 3/1/2012 10:11:59 391 | // 4/8/2014 22:05 392 | // 3/1/2012 10:11:59 AM 393 | switch r { 394 | case ':': 395 | state = stateDigitSlashWSColonColon 396 | case 'A', 'P': 397 | state = stateDigitSlashWSColonAMPM 398 | } 399 | case stateDigitSlashWSColonColon: // starts digit then slash 02/ more digits/slashes then whitespace 400 | // 2014/07/10 06:55:38.156283 401 | // 03/19/2012 10:11:59 402 | // 04/2/2014 03:00:37 403 | // 3/1/2012 10:11:59 404 | // 4/8/2014 22:05 405 | // 3/1/2012 10:11:59 AM 406 | switch r { 407 | case 'A', 'P': 408 | state = stateDigitSlashWSColonColonAMPM 409 | } 410 | case stateDigitAlpha: 411 | // 12 Feb 2006, 19:17 412 | // 12 Feb 2006, 19:17:22 413 | // 2006年01月02日 414 | switch { 415 | case len(datestr) == len("02 Jan 2006, 15:04"): 416 | return parse("02 Jan 2006, 15:04", datestr, loc) 417 | case len(datestr) == len("02 Jan 2006, 15:04:05"): 418 | return parse("02 Jan 2006, 15:04:05", datestr, loc) 419 | case len(datestr) == len("2006年01月02日"): 420 | return parse("2006年01月02日", datestr, loc) 421 | case len(datestr) == len("2006年01月02日 15:04"): 422 | return parse("2006年01月02日 15:04", datestr, loc) 423 | } 424 | case stateAlpha: // starts alpha 425 | // stateAlphaWS 426 | // Mon Jan _2 15:04:05 2006 427 | // Mon Jan _2 15:04:05 MST 2006 428 | // Mon Jan 02 15:04:05 -0700 2006 429 | // Mon Aug 10 15:44:11 UTC+0100 2015 430 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 431 | // stateAlphaWSDigitComma 432 | // May 8, 2009 5:57:51 PM 433 | // 434 | // 
stateWeekdayComma 435 | // Monday, 02-Jan-06 15:04:05 MST 436 | // stateWeekdayCommaOffset 437 | // Monday, 02 Jan 2006 15:04:05 -0700 438 | // Monday, 02 Jan 2006 15:04:05 +0100 439 | // stateWeekdayAbbrevComma 440 | // Mon, 02-Jan-06 15:04:05 MST 441 | // Mon, 02 Jan 2006 15:04:05 MST 442 | // stateWeekdayAbbrevCommaOffset 443 | // Mon, 02 Jan 2006 15:04:05 -0700 444 | // Thu, 13 Jul 2017 08:58:40 +0100 445 | // stateWeekdayAbbrevCommaOffsetZone 446 | // Tue, 11 Jul 2017 16:28:13 +0200 (CEST) 447 | switch { 448 | case unicode.IsLetter(r): 449 | continue 450 | case r == ' ': 451 | state = stateAlphaWS 452 | case r == ',': 453 | if i == 3 { 454 | state = stateWeekdayAbbrevComma 455 | } else { 456 | state = stateWeekdayComma 457 | } 458 | } 459 | case stateWeekdayComma: // Starts alpha then comma 460 | // Mon, 02-Jan-06 15:04:05 MST 461 | // Mon, 02 Jan 2006 15:04:05 MST 462 | // stateWeekdayCommaOffset 463 | // Monday, 02 Jan 2006 15:04:05 -0700 464 | // Monday, 02 Jan 2006 15:04:05 +0100 465 | switch { 466 | case r == '-': 467 | if i < 15 { 468 | return parse("Monday, 02-Jan-06 15:04:05 MST", datestr, loc) 469 | } 470 | state = stateWeekdayCommaOffset 471 | case r == '+': 472 | state = stateWeekdayCommaOffset 473 | } 474 | case stateWeekdayAbbrevComma: // Starts alpha then comma 475 | // Mon, 02-Jan-06 15:04:05 MST 476 | // Mon, 02 Jan 2006 15:04:05 MST 477 | // stateWeekdayAbbrevCommaOffset 478 | // Mon, 02 Jan 2006 15:04:05 -0700 479 | // Thu, 13 Jul 2017 08:58:40 +0100 480 | // stateWeekdayAbbrevCommaOffsetZone 481 | // Tue, 11 Jul 2017 16:28:13 +0200 (CEST) 482 | switch { 483 | case r == '-': 484 | if i < 15 { 485 | return parse("Mon, 02-Jan-06 15:04:05 MST", datestr, loc) 486 | } 487 | state = stateWeekdayAbbrevCommaOffset 488 | case r == '+': 489 | state = stateWeekdayAbbrevCommaOffset 490 | } 491 | 492 | case stateWeekdayAbbrevCommaOffset: 493 | // stateWeekdayAbbrevCommaOffset 494 | // Mon, 02 Jan 2006 15:04:05 -0700 495 | // Thu, 13 Jul 2017 08:58:40 +0100 496 | // stateWeekdayAbbrevCommaOffsetZone 497 | // Tue, 11 Jul 2017 16:28:13 +0200 (CEST) 498 | if r == '(' { 499 | state = stateWeekdayAbbrevCommaOffsetZone 500 | } 501 | 502 | case stateAlphaWS: // Starts alpha then whitespace 503 | // Mon Jan _2 15:04:05 2006 504 | // Mon Jan _2 15:04:05 MST 2006 505 | // Mon Jan 02 15:04:05 -0700 2006 506 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 507 | // Mon Aug 10 15:44:11 UTC+0100 2015 508 | switch { 509 | case unicode.IsLetter(r): 510 | state = stateAlphaWSAlpha 511 | case unicode.IsDigit(r): 512 | state = stateAlphaWSDigitComma 513 | } 514 | 515 | case stateAlphaWSDigitComma: // Starts Alpha, whitespace, digit, comma 516 | // May 8, 2009 5:57:51 PM 517 | // May 8, 2009 518 | if len(datestr) == len("May 8, 2009") { 519 | return parse("Jan 2, 2006", datestr, loc) 520 | } 521 | return parse("Jan 2, 2006 3:04:05 PM", datestr, loc) 522 | 523 | case stateAlphaWSAlpha: // Alpha, whitespace, alpha 524 | // Mon Jan _2 15:04:05 2006 525 | // Mon Jan 02 15:04:05 -0700 2006 526 | // Mon Jan _2 15:04:05 MST 2006 527 | // Mon Aug 10 15:44:11 UTC+0100 2015 528 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 529 | if r == ':' { 530 | state = stateAlphaWSAlphaColon 531 | } 532 | case stateAlphaWSAlphaColon: // Alpha, whitespace, alpha, : 533 | // Mon Jan _2 15:04:05 2006 534 | // Mon Jan 02 15:04:05 -0700 2006 535 | // Mon Jan _2 15:04:05 MST 2006 536 | // Mon Aug 10 15:44:11 UTC+0100 2015 537 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 538 | if 
unicode.IsLetter(r) { 539 | state = stateAlphaWSAlphaColonAlpha 540 | } else if r == '-' || r == '+' { 541 | state = stateAlphaWSAlphaColonOffset 542 | } 543 | case stateAlphaWSAlphaColonAlpha: // Alpha, whitespace, alpha, :, alpha 544 | // Mon Jan _2 15:04:05 MST 2006 545 | // Mon Aug 10 15:44:11 UTC+0100 2015 546 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 547 | if r == '+' { 548 | state = stateAlphaWSAlphaColonAlphaOffset 549 | } 550 | case stateAlphaWSAlphaColonAlphaOffset: // Alpha, whitespace, alpha, : , alpha, offset, ? 551 | // Mon Aug 10 15:44:11 UTC+0100 2015 552 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 553 | if unicode.IsLetter(r) { 554 | state = stateAlphaWSAlphaColonAlphaOffsetAlpha 555 | } 556 | case stateHowLongAgo: 557 | if unicode.IsLetter(r) { 558 | break iterRunes 559 | } 560 | default: 561 | break iterRunes 562 | } 563 | } 564 | 565 | switch state { 566 | case stateDigit: 567 | // unixy timestamps ish 568 | // 1499979655583057426 nanoseconds 569 | // 1499979795437000 micro-seconds 570 | // 1499979795437 milliseconds 571 | // 1384216367189 572 | // 1332151919 seconds 573 | // 20140601 yyyymmdd 574 | // 2014 yyyy 575 | t := time.Time{} 576 | if len(datestr) > len("1499979795437000") { 577 | if nanoSecs, err := strconv.ParseInt(datestr, 10, 64); err == nil { 578 | t = time.Unix(0, nanoSecs) 579 | } 580 | } else if len(datestr) > len("1499979795437") { 581 | if microSecs, err := strconv.ParseInt(datestr, 10, 64); err == nil { 582 | t = time.Unix(0, microSecs*1000) 583 | } 584 | } else if len(datestr) > len("1332151919") { 585 | if miliSecs, err := strconv.ParseInt(datestr, 10, 64); err == nil { 586 | t = time.Unix(0, miliSecs*1000*1000) 587 | } 588 | } else if len(datestr) == len("20140601") { 589 | return parse("20060102", datestr, loc) 590 | } else if len(datestr) == len("2014") { 591 | return parse("2006", datestr, loc) 592 | } 593 | if t.IsZero() { 594 | if secs, err := strconv.ParseInt(datestr, 10, 64); err == nil { 595 | if secs < 0 { 596 | // Now, for unix-seconds we aren't going to guess a lot 597 | // nothing before unix-epoch 598 | } else { 599 | t = time.Unix(secs, 0) 600 | } 601 | } 602 | } 603 | if !t.IsZero() { 604 | if loc == nil { 605 | return t, nil 606 | } 607 | return t.In(loc), nil 608 | } 609 | 610 | case stateDigitDash: // starts digit then dash 02- 611 | // 2006-01-02 612 | // 2006-01 613 | if len(datestr) == len("2014-04-26") { 614 | return parse("2006-01-02", datestr, loc) 615 | } else if len(datestr) == len("2014-04") { 616 | return parse("2006-01", datestr, loc) 617 | } 618 | case stateDigitDashAlpha: 619 | // 2013-Feb-03 620 | return parse("2006-Jan-02", datestr, loc) 621 | 622 | case stateDigitDashTOffset: 623 | // 2006-01-02T15:04:05+0000 624 | return parse("2006-01-02T15:04:05-0700", datestr, loc) 625 | 626 | case stateDigitDashTOffsetColon: 627 | // With another +/- time-zone at end 628 | // 2006-01-02T15:04:05.999999999+07:00 629 | // 2006-01-02T15:04:05.999999999-07:00 630 | // 2006-01-02T15:04:05.999999+07:00 631 | // 2006-01-02T15:04:05.999999-07:00 632 | // 2006-01-02T15:04:05.999+07:00 633 | // 2006-01-02T15:04:05.999-07:00 634 | // 2006-01-02T15:04:05+07:00 635 | // 2006-01-02T15:04:05-07:00 636 | return parse("2006-01-02T15:04:05-07:00", datestr, loc) 637 | 638 | case stateDigitDashT: // starts digit then dash 02- then T 639 | // 2006-01-02T15:04:05.999999 640 | // 2006-01-02T15:04:05.999999 641 | return parse("2006-01-02T15:04:05", datestr, loc) 642 | 643 | case stateDigitDashTZDigit: 644 | // With a 
time-zone at end after Z 645 | // 2006-01-02T15:04:05.999999999Z07:00 646 | // 2006-01-02T15:04:05Z07:00 647 | // RFC3339 = "2006-01-02T15:04:05Z07:00" 648 | // RFC3339Nano = "2006-01-02T15:04:05.999999999Z07:00" 649 | return time.Time{}, fmt.Errorf("RFC339 Dates may not contain both Z & Offset for %q see https://github.com/golang/go/issues/5294", datestr) 650 | 651 | case stateDigitDashTZ: // starts digit then dash 02- then T Then Z 652 | // 2006-01-02T15:04:05.999999999Z 653 | // 2006-01-02T15:04:05.99999999Z 654 | // 2006-01-02T15:04:05.9999999Z 655 | // 2006-01-02T15:04:05.999999Z 656 | // 2006-01-02T15:04:05.99999Z 657 | // 2006-01-02T15:04:05.9999Z 658 | // 2006-01-02T15:04:05.999Z 659 | // 2006-01-02T15:04:05.99Z 660 | // 2009-08-12T22:15Z -- No seconds/milliseconds 661 | switch len(datestr) { 662 | case len("2009-08-12T22:15Z"): 663 | return parse("2006-01-02T15:04Z", datestr, loc) 664 | default: 665 | return parse("2006-01-02T15:04:05Z", datestr, loc) 666 | } 667 | case stateDigitDashWs: // starts digit then dash 02- then whitespace 1 << 2 << 5 + 3 668 | // 2013-04-01 22:43:22 669 | return parse("2006-01-02 15:04:05", datestr, loc) 670 | 671 | case stateDigitDashWsWsOffset: 672 | // 2006-01-02 15:04:05 -0700 673 | return parse("2006-01-02 15:04:05 -0700", datestr, loc) 674 | 675 | case stateDigitDashWsWsOffsetColon: 676 | // 2006-01-02 15:04:05 -07:00 677 | return parse("2006-01-02 15:04:05 -07:00", datestr, loc) 678 | 679 | case stateDigitDashWsWsOffsetAlpha: 680 | // 2015-02-18 00:12:00 +0000 UTC 681 | t, err := parse("2006-01-02 15:04:05 -0700 UTC", datestr, loc) 682 | if err == nil { 683 | return t, nil 684 | } 685 | return parse("2006-01-02 15:04:05 +0000 GMT", datestr, loc) 686 | 687 | case stateDigitDashWsWsOffsetColonAlpha: 688 | // 2015-02-18 00:12:00 +00:00 UTC 689 | return parse("2006-01-02 15:04:05 -07:00 UTC", datestr, loc) 690 | 691 | case stateDigitDashWsOffset: 692 | // 2017-07-19 03:21:51+00:00 693 | return parse("2006-01-02 15:04:05-07:00", datestr, loc) 694 | 695 | case stateDigitDashWsWsAlpha: 696 | // 2014-12-16 06:20:00 UTC 697 | t, err := parse("2006-01-02 15:04:05 UTC", datestr, loc) 698 | if err == nil { 699 | return t, nil 700 | } 701 | t, err = parse("2006-01-02 15:04:05 GMT", datestr, loc) 702 | if err == nil { 703 | return t, nil 704 | } 705 | if len(datestr) > len("2006-01-02 03:04:05") { 706 | t, err = parse("2006-01-02 03:04:05", datestr[:len("2006-01-02 03:04:05")], loc) 707 | if err == nil { 708 | return t, nil 709 | } 710 | } 711 | 712 | case stateDigitDashWsPeriod: 713 | // 2012-08-03 18:31:59.257000000 714 | // 2014-04-26 17:24:37.3186369 715 | // 2017-01-27 00:07:31.945167 716 | // 2016-03-14 00:00:00.000 717 | return parse("2006-01-02 15:04:05", datestr, loc) 718 | 719 | case stateDigitDashWsPeriodAlpha: 720 | // 2012-08-03 18:31:59.257000000 UTC 721 | // 2014-04-26 17:24:37.3186369 UTC 722 | // 2017-01-27 00:07:31.945167 UTC 723 | // 2016-03-14 00:00:00.000 UTC 724 | return parse("2006-01-02 15:04:05 UTC", datestr, loc) 725 | 726 | case stateDigitDashWsPeriodOffset: 727 | // 2012-08-03 18:31:59.257000000 +0000 728 | // 2014-04-26 17:24:37.3186369 +0000 729 | // 2017-01-27 00:07:31.945167 +0000 730 | // 2016-03-14 00:00:00.000 +0000 731 | return parse("2006-01-02 15:04:05 -0700", datestr, loc) 732 | 733 | case stateDigitDashWsPeriodOffsetAlpha: 734 | // 2012-08-03 18:31:59.257000000 +0000 UTC 735 | // 2014-04-26 17:24:37.3186369 +0000 UTC 736 | // 2017-01-27 00:07:31.945167 +0000 UTC 737 | // 2016-03-14 00:00:00.000 +0000 UTC 738 | return 
parse("2006-01-02 15:04:05 -0700 UTC", datestr, loc) 739 | 740 | case stateAlphaWSAlphaColon: 741 | // Mon Jan _2 15:04:05 2006 742 | return parse(time.ANSIC, datestr, loc) 743 | 744 | case stateAlphaWSAlphaColonOffset: 745 | // Mon Jan 02 15:04:05 -0700 2006 746 | return parse(time.RubyDate, datestr, loc) 747 | 748 | case stateAlphaWSAlphaColonAlpha: 749 | // Mon Jan _2 15:04:05 MST 2006 750 | return parse(time.UnixDate, datestr, loc) 751 | 752 | case stateAlphaWSAlphaColonAlphaOffset: 753 | // Mon Aug 10 15:44:11 UTC+0100 2015 754 | return parse("Mon Jan 02 15:04:05 MST-0700 2006", datestr, loc) 755 | 756 | case stateAlphaWSAlphaColonAlphaOffsetAlpha: 757 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 758 | if len(datestr) > len("Mon Jan 02 2006 15:04:05 MST-0700") { 759 | // What effing time stamp is this? 760 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 761 | dateTmp := datestr[:33] 762 | return parse("Mon Jan 02 2006 15:04:05 MST-0700", dateTmp, loc) 763 | } 764 | case stateDigitSlash: // starts digit then slash 02/ (but nothing else) 765 | // 3/1/2014 766 | // 10/13/2014 767 | // 01/02/2006 768 | // 2014/10/13 769 | if firstSlash == 4 { 770 | if len(datestr) == len("2006/01/02") { 771 | return parse("2006/01/02", datestr, loc) 772 | } 773 | return parse("2006/1/2", datestr, loc) 774 | } 775 | for _, parseFormat := range shortDates { 776 | if t, err := parse(parseFormat, datestr, loc); err == nil { 777 | return t, nil 778 | } 779 | } 780 | 781 | case stateDigitSlashWSColon: // starts digit then slash 02/ more digits/slashes then whitespace 782 | // 4/8/2014 22:05 783 | // 04/08/2014 22:05 784 | // 2014/4/8 22:05 785 | // 2014/04/08 22:05 786 | 787 | if firstSlash == 4 { 788 | for _, layout := range []string{"2006/01/02 15:04", "2006/1/2 15:04", "2006/01/2 15:04", "2006/1/02 15:04"} { 789 | if t, err := parse(layout, datestr, loc); err == nil { 790 | return t, nil 791 | } 792 | } 793 | } else { 794 | for _, layout := range []string{"01/02/2006 15:04", "01/2/2006 15:04", "1/02/2006 15:04", "1/2/2006 15:04"} { 795 | if t, err := parse(layout, datestr, loc); err == nil { 796 | return t, nil 797 | } 798 | } 799 | } 800 | 801 | case stateDigitSlashWSColonAMPM: // starts digit then slash 02/ more digits/slashes then whitespace 802 | // 4/8/2014 22:05 PM 803 | // 04/08/2014 22:05 PM 804 | // 04/08/2014 1:05 PM 805 | // 2014/4/8 22:05 PM 806 | // 2014/04/08 22:05 PM 807 | 808 | if firstSlash == 4 { 809 | for _, layout := range []string{"2006/01/02 03:04 PM", "2006/01/2 03:04 PM", "2006/1/02 03:04 PM", "2006/1/2 03:04 PM", 810 | "2006/01/02 3:04 PM", "2006/01/2 3:04 PM", "2006/1/02 3:04 PM", "2006/1/2 3:04 PM"} { 811 | if t, err := parse(layout, datestr, loc); err == nil { 812 | return t, nil 813 | } 814 | } 815 | } else { 816 | for _, layout := range []string{"01/02/2006 03:04 PM", "01/2/2006 03:04 PM", "1/02/2006 03:04 PM", "1/2/2006 03:04 PM", 817 | "01/02/2006 3:04 PM", "01/2/2006 3:04 PM", "1/02/2006 3:04 PM", "1/2/2006 3:04 PM"} { 818 | if t, err := parse(layout, datestr, loc); err == nil { 819 | return t, nil 820 | } 821 | 822 | } 823 | } 824 | 825 | case stateDigitSlashWSColonColon: // starts digit then slash 02/ more digits/slashes then whitespace double colons 826 | // 2014/07/10 06:55:38.156283 827 | // 03/19/2012 10:11:59 828 | // 3/1/2012 10:11:59 829 | // 03/1/2012 10:11:59 830 | // 3/01/2012 10:11:59 831 | if firstSlash == 4 { 832 | for _, layout := range []string{"2006/01/02 15:04:05", "2006/1/02 15:04:05", "2006/01/2 15:04:05", "2006/1/2 15:04:05"} { 
833 | if t, err := parse(layout, datestr, loc); err == nil { 834 | return t, nil 835 | } 836 | } 837 | } else { 838 | for _, layout := range []string{"01/02/2006 15:04:05", "1/02/2006 15:04:05", "01/2/2006 15:04:05", "1/2/2006 15:04:05"} { 839 | if t, err := parse(layout, datestr, loc); err == nil { 840 | return t, nil 841 | } 842 | } 843 | } 844 | 845 | case stateDigitSlashWSColonColonAMPM: // starts digit then slash 02/ more digits/slashes then whitespace double colons 846 | // 2014/07/10 06:55:38.156283 PM 847 | // 03/19/2012 10:11:59 PM 848 | // 3/1/2012 10:11:59 PM 849 | // 03/1/2012 10:11:59 PM 850 | // 3/01/2012 10:11:59 PM 851 | 852 | if firstSlash == 4 { 853 | for _, layout := range []string{"2006/01/02 03:04:05 PM", "2006/1/02 03:04:05 PM", "2006/01/2 03:04:05 PM", "2006/1/2 03:04:05 PM", 854 | "2006/01/02 3:04:05 PM", "2006/1/02 3:04:05 PM", "2006/01/2 3:04:05 PM", "2006/1/2 3:04:05 PM"} { 855 | if t, err := parse(layout, datestr, loc); err == nil { 856 | return t, nil 857 | } 858 | } 859 | } else { 860 | for _, layout := range []string{"01/02/2006 03:04:05 PM", "1/02/2006 03:04:05 PM", "01/2/2006 03:04:05 PM", "1/2/2006 03:04:05 PM"} { 861 | if t, err := parse(layout, datestr, loc); err == nil { 862 | return t, nil 863 | } 864 | } 865 | } 866 | 867 | case stateWeekdayCommaOffset: 868 | // Monday, 02 Jan 2006 15:04:05 -0700 869 | // Monday, 02 Jan 2006 15:04:05 +0100 870 | return parse("Monday, 02 Jan 2006 15:04:05 -0700", datestr, loc) 871 | case stateWeekdayAbbrevComma: // Starts alpha then comma 872 | // Mon, 02-Jan-06 15:04:05 MST 873 | // Mon, 02 Jan 2006 15:04:05 MST 874 | return parse("Mon, 02 Jan 2006 15:04:05 MST", datestr, loc) 875 | case stateWeekdayAbbrevCommaOffset: 876 | // Mon, 02 Jan 2006 15:04:05 -0700 877 | // Thu, 13 Jul 2017 08:58:40 +0100 878 | // RFC1123Z = "Mon, 02 Jan 2006 15:04:05 -0700" // RFC1123 with numeric zone 879 | return parse("Mon, 02 Jan 2006 15:04:05 -0700", datestr, loc) 880 | case stateWeekdayAbbrevCommaOffsetZone: 881 | // Tue, 11 Jul 2017 16:28:13 +0200 (CEST) 882 | return parse("Mon, 02 Jan 2006 15:04:05 -0700 (CEST)", datestr, loc) 883 | case stateHowLongAgo: 884 | // 1 minutes ago 885 | // 1 hours ago 886 | // 1 day ago 887 | switch len(datestr) { 888 | case len("1 minutes ago"), len("10 minutes ago"), len("100 minutes ago"): 889 | return agoTime(datestr, time.Minute) 890 | case len("1 hours ago"), len("10 hours ago"): 891 | return agoTime(datestr, time.Hour) 892 | case len("1 day ago"), len("10 day ago"): 893 | return agoTime(datestr, Day) 894 | } 895 | } 896 | 897 | return time.Time{}, fmt.Errorf("Could not find date format for %s", datestr) 898 | } 899 | 900 | func agoTime(datestr string, d time.Duration) (time.Time, error) { 901 | dstrs := strings.Split(datestr, " ") 902 | m, err := strconv.Atoi(dstrs[0]) 903 | if err != nil { 904 | return time.Time{}, err 905 | } 906 | return time.Now().Add(-d * time.Duration(m)), nil 907 | } 908 | -------------------------------------------------------------------------------- /parseany.go: -------------------------------------------------------------------------------- 1 | // Package dateparse parses date-strings without knowing the format 2 | // in advance, using a fast lex based approach to eliminate shotgun 3 | // attempts. It leans towards US style dates when there is a conflict. 
4 | package dateparse 5 | 6 | import ( 7 | "fmt" 8 | "strconv" 9 | "strings" 10 | "time" 11 | "unicode" 12 | ) 13 | 14 | // _ _ 15 | // | | | | 16 | // __| | __ _ | |_ ___ _ __ __ _ _ __ ___ ___ 17 | // / _` | / _` | | __| / _ \ | '_ \ / _` | | '__| / __| / _ \ 18 | // | (_| | | (_| | | |_ | __/ | |_) | | (_| | | | \__ \ | __/ 19 | // \__,_| \__,_| \__| \___| | .__/ \__,_| |_| |___/ \___| 20 | // | | 21 | // |_| 22 | 23 | type dateState int 24 | 25 | const ( 26 | stateStart dateState = iota 27 | stateDigit 28 | stateDigitDash 29 | stateDigitDashAlpha 30 | stateDigitDashWs 31 | stateDigitDashWsWs 32 | stateDigitDashWsWsAMPMMaybe 33 | stateDigitDashWsWsOffset 34 | stateDigitDashWsWsOffsetAlpha 35 | stateDigitDashWsWsOffsetColonAlpha 36 | stateDigitDashWsWsOffsetColon 37 | stateDigitDashWsOffset 38 | stateDigitDashWsWsAlpha 39 | stateDigitDashWsPeriod 40 | stateDigitDashWsPeriodAlpha 41 | stateDigitDashWsPeriodOffset 42 | stateDigitDashWsPeriodOffsetAlpha 43 | stateDigitDashT 44 | stateDigitDashTZ 45 | stateDigitDashTZDigit 46 | stateDigitDashTOffset 47 | stateDigitDashTOffsetColon 48 | stateDigitSlash 49 | stateDigitSlashWS 50 | stateDigitSlashWSColon 51 | stateDigitSlashWSColonAMPM 52 | stateDigitSlashWSColonColon 53 | stateDigitSlashWSColonColonAMPM 54 | stateDigitAlpha 55 | stateAlpha 56 | stateAlphaWS 57 | stateAlphaWSDigitComma 58 | stateAlphaWSAlpha 59 | stateAlphaWSAlphaColon 60 | stateAlphaWSAlphaColonOffset 61 | stateAlphaWSAlphaColonAlpha 62 | stateAlphaWSAlphaColonAlphaOffset 63 | stateAlphaWSAlphaColonAlphaOffsetAlpha 64 | stateWeekdayComma 65 | stateWeekdayCommaOffset 66 | stateWeekdayAbbrevComma 67 | stateWeekdayAbbrevCommaOffset 68 | stateWeekdayAbbrevCommaOffsetZone 69 | stateHowLongAgo 70 | ) 71 | 72 | const ( 73 | Day = time.Hour * 24 74 | ) 75 | 76 | var ( 77 | shortDates = []string{"01/02/2006", "1/2/2006", "06/01/02", "01/02/06", "1/2/06"} 78 | ) 79 | 80 | // ParseAny parses a date string of unknown format, detecting the layout before parsing. 81 | // Normal parse; uses the same timezone rules as time.Parse(). 82 | func ParseAny(datestr string) (time.Time, error) { 83 | return parseTime(datestr, nil) 84 | } 85 | 86 | // ParseIn parses with a Location, following the same timezone/offset 87 | // rules as time.ParseInLocation(). If timezone/offset info exists in the 88 | // datestring, the given location's rules are used for zone interpretation. 89 | // That is, MST means one thing when using America/Denver and something else 90 | // in other locations. 91 | func ParseIn(datestr string, loc *time.Location) (time.Time, error) { 92 | return parseTime(datestr, loc) 93 | } 94 | 95 | // ParseLocal parses a date string of unknown format, detecting the layout 96 | // and using time.Local as the location. 97 | // 98 | // It is the same as ParseIn, but lazily uses the global time.Local 99 | // variable as the Location argument. 100 | // 101 | // denverLoc, _ := time.LoadLocation("America/Denver") 102 | // time.Local = denverLoc 103 | // 104 | // t, err := dateparse.ParseLocal("3/1/2014") 105 | // 106 | // Equivalent to: 107 | // 108 | // t, err := dateparse.ParseIn("3/1/2014", denverLoc) 109 | // 110 | func ParseLocal(datestr string) (time.Time, error) { 111 | return parseTime(datestr, time.Local) 112 | } 113 | 114 | // MustParse parses a date and panics if it can't be parsed. Used for testing. 115 | // Not recommended for most use-cases.
116 | func MustParse(datestr string) time.Time { 117 | t, err := parseTime(datestr, nil) 118 | if err != nil { 119 | panic(err.Error()) 120 | } 121 | return t 122 | } 123 | 124 | func parse(layout, datestr string, loc *time.Location) (time.Time, error) { 125 | if loc == nil { 126 | return time.Parse(layout, datestr) 127 | } 128 | return time.ParseInLocation(layout, datestr, loc) 129 | } 130 | 131 | func parseTime(datestr string, loc *time.Location) (time.Time, error) { 132 | state := stateStart 133 | 134 | firstSlash := 0 135 | 136 | // General strategy is to read rune by rune through the date looking for 137 | // certain hints of what type of date we are dealing with. 138 | // Hopefully we only need to read about 5 or 6 bytes before 139 | // we figure it out and then attempt a parse 140 | iterRunes: 141 | for i := 0; i < len(datestr); i++ { 142 | r := rune(datestr[i]) 143 | // r, bytesConsumed := utf8.DecodeRuneInString(datestr[ri:]) 144 | // if bytesConsumed > 1 { 145 | // ri += (bytesConsumed - 1) 146 | // } 147 | 148 | switch state { 149 | case stateStart: 150 | if unicode.IsDigit(r) { 151 | state = stateDigit 152 | } else if unicode.IsLetter(r) { 153 | state = stateAlpha 154 | } 155 | case stateDigit: // starts digits 156 | if unicode.IsDigit(r) { 157 | continue 158 | } else if unicode.IsLetter(r) { 159 | state = stateDigitAlpha 160 | continue 161 | } 162 | switch r { 163 | case '-', '\u2212': 164 | state = stateDigitDash 165 | case '/': 166 | state = stateDigitSlash 167 | firstSlash = i 168 | } 169 | case stateDigitDash: // starts digit then dash 02- 170 | // 2006-01-02T15:04:05Z07:00 171 | // 2017-06-25T17:46:57.45706582-07:00 172 | // 2006-01-02T15:04:05.999999999Z07:00 173 | // 2006-01-02T15:04:05+0000 174 | // 2012-08-03 18:31:59.257000000 175 | // 2014-04-26 17:24:37.3186369 176 | // 2017-01-27 00:07:31.945167 177 | // 2016-03-14 00:00:00.000 178 | // 2014-05-11 08:20:13,787 179 | // 2017-07-19 03:21:51+00:00 180 | // 2006-01-02 181 | // 2013-04-01 22:43:22 182 | // 2014-04-26 05:24:37 PM 183 | // 2013-Feb-03 184 | switch { 185 | case r == ' ': 186 | state = stateDigitDashWs 187 | case r == 'T': 188 | state = stateDigitDashT 189 | default: 190 | if unicode.IsLetter(r) { 191 | state = stateDigitDashAlpha 192 | break iterRunes 193 | } 194 | } 195 | case stateDigitDashWs: 196 | // 2013-04-01 22:43:22 197 | // 2014-05-11 08:20:13,787 198 | // stateDigitDashWsWs 199 | // 2014-04-26 05:24:37 PM 200 | // 2014-12-16 06:20:00 UTC 201 | // 2015-02-18 00:12:00 +0000 UTC 202 | // 2006-01-02 15:04:05 -0700 203 | // 2006-01-02 15:04:05 -07:00 204 | // stateDigitDashWsOffset 205 | // 2017-07-19 03:21:51+00:00 206 | // stateDigitDashWsPeriod 207 | // 2014-04-26 17:24:37.3186369 208 | // 2017-01-27 00:07:31.945167 209 | // 2012-08-03 18:31:59.257000000 210 | // 2016-03-14 00:00:00.000 211 | // stateDigitDashWsPeriodOffset 212 | // 2017-01-27 00:07:31.945167 +0000 213 | // 2016-03-14 00:00:00.000 +0000 214 | // stateDigitDashWsPeriodOffsetAlpha 215 | // 2017-01-27 00:07:31.945167 +0000 UTC 216 | // 2016-03-14 00:00:00.000 +0000 UTC 217 | // stateDigitDashWsPeriodAlpha 218 | // 2014-12-16 06:20:00.000 UTC 219 | switch r { 220 | case ',': 221 | if len(datestr) == len("2014-05-11 08:20:13,787") { 222 | // go doesn't seem to parse this one natively? or did i miss it? 
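// Parse the portion before the comma, then re-attach the trailing digits as milliseconds (ms * 1e6 nanoseconds via UnixNano below).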
223 | t, err := parse("2006-01-02 03:04:05", datestr[:i], loc) 224 | if err == nil { 225 | ms, err := strconv.Atoi(datestr[i+1:]) 226 | if err == nil { 227 | return time.Unix(0, t.UnixNano()+int64(ms)*1e6), nil 228 | } 229 | } 230 | return t, err 231 | } 232 | case '-', '+': 233 | state = stateDigitDashWsOffset 234 | case '.': 235 | state = stateDigitDashWsPeriod 236 | case ' ': 237 | state = stateDigitDashWsWs 238 | } 239 | 240 | case stateDigitDashWsWs: 241 | // stateDigitDashWsWsAlpha 242 | // 2014-12-16 06:20:00 UTC 243 | // stateDigitDashWsWsAMPMMaybe 244 | // 2014-04-26 05:24:37 PM 245 | // stateDigitDashWsWsOffset 246 | // 2006-01-02 15:04:05 -0700 247 | // stateDigitDashWsWsOffsetColon 248 | // 2006-01-02 15:04:05 -07:00 249 | // stateDigitDashWsWsOffsetColonAlpha 250 | // 2015-02-18 00:12:00 +00:00 UTC 251 | // stateDigitDashWsWsOffsetAlpha 252 | // 2015-02-18 00:12:00 +0000 UTC 253 | switch r { 254 | case 'A', 'P': 255 | state = stateDigitDashWsWsAMPMMaybe 256 | case '+', '-': 257 | state = stateDigitDashWsWsOffset 258 | default: 259 | if unicode.IsLetter(r) { 260 | // 2014-12-16 06:20:00 UTC 261 | state = stateDigitDashWsWsAlpha 262 | break iterRunes 263 | } 264 | } 265 | 266 | case stateDigitDashWsWsAMPMMaybe: 267 | if r == 'M' { 268 | return parse("2006-01-02 03:04:05 PM", datestr, loc) 269 | } 270 | state = stateDigitDashWsWsAlpha 271 | 272 | case stateDigitDashWsWsOffset: 273 | // stateDigitDashWsWsOffset 274 | // 2006-01-02 15:04:05 -0700 275 | // stateDigitDashWsWsOffsetColon 276 | // 2006-01-02 15:04:05 -07:00 277 | // stateDigitDashWsWsOffsetColonAlpha 278 | // 2015-02-18 00:12:00 +00:00 UTC 279 | // stateDigitDashWsWsOffsetAlpha 280 | // 2015-02-18 00:12:00 +0000 UTC 281 | if r == ':' { 282 | state = stateDigitDashWsWsOffsetColon 283 | } else if unicode.IsLetter(r) { 284 | // 2015-02-18 00:12:00 +0000 UTC 285 | state = stateDigitDashWsWsOffsetAlpha 286 | break iterRunes 287 | } 288 | 289 | case stateDigitDashWsWsOffsetColon: 290 | // stateDigitDashWsWsOffsetColon 291 | // 2006-01-02 15:04:05 -07:00 292 | // stateDigitDashWsWsOffsetColonAlpha 293 | // 2015-02-18 00:12:00 +00:00 UTC 294 | if unicode.IsLetter(r) { 295 | // 2015-02-18 00:12:00 +00:00 UTC 296 | state = stateDigitDashWsWsOffsetColonAlpha 297 | break iterRunes 298 | } 299 | 300 | case stateDigitDashWsPeriod: 301 | // 2014-04-26 17:24:37.3186369 302 | // 2017-01-27 00:07:31.945167 303 | // 2012-08-03 18:31:59.257000000 304 | // 2016-03-14 00:00:00.000 305 | // stateDigitDashWsPeriodOffset 306 | // 2017-01-27 00:07:31.945167 +0000 307 | // 2016-03-14 00:00:00.000 +0000 308 | // stateDigitDashWsPeriodOffsetAlpha 309 | // 2017-01-27 00:07:31.945167 +0000 UTC 310 | // 2016-03-14 00:00:00.000 +0000 UTC 311 | // stateDigitDashWsPeriodAlpha 312 | // 2014-12-16 06:20:00.000 UTC 313 | if unicode.IsLetter(r) { 314 | // 2014-12-16 06:20:00.000 UTC 315 | state = stateDigitDashWsPeriodAlpha 316 | break iterRunes 317 | } else if r == '+' || r == '-' { 318 | state = stateDigitDashWsPeriodOffset 319 | } 320 | case stateDigitDashWsPeriodOffset: 321 | // 2017-01-27 00:07:31.945167 +0000 322 | // 2016-03-14 00:00:00.000 +0000 323 | // stateDigitDashWsPeriodOffsetAlpha 324 | // 2017-01-27 00:07:31.945167 +0000 UTC 325 | // 2016-03-14 00:00:00.000 +0000 UTC 326 | if unicode.IsLetter(r) { 327 | // 2014-12-16 06:20:00.000 UTC 328 | // 2017-01-27 00:07:31.945167 +0000 UTC 329 | // 2016-03-14 00:00:00.000 +0000 UTC 330 | state = stateDigitDashWsPeriodOffsetAlpha 331 | break iterRunes 332 | } 333 | case stateDigitDashT: // starts digit 
then dash 02- then T 334 | // stateDigitDashT 335 | // 2006-01-02T15:04:05 336 | // stateDigitDashTZ 337 | // 2006-01-02T15:04:05.999999999Z 338 | // 2006-01-02T15:04:05.99999999Z 339 | // 2006-01-02T15:04:05.9999999Z 340 | // 2006-01-02T15:04:05.999999Z 341 | // 2006-01-02T15:04:05.99999Z 342 | // 2006-01-02T15:04:05.9999Z 343 | // 2006-01-02T15:04:05.999Z 344 | // 2006-01-02T15:04:05.99Z 345 | // 2009-08-12T22:15Z 346 | // stateDigitDashTZDigit 347 | // 2006-01-02T15:04:05.999999999Z07:00 348 | // 2006-01-02T15:04:05Z07:00 349 | // With another dash aka time-zone at end 350 | // stateDigitDashTOffset 351 | // stateDigitDashTOffsetColon 352 | // 2017-06-25T17:46:57.45706582-07:00 353 | // 2017-06-25T17:46:57+04:00 354 | // 2006-01-02T15:04:05+0000 355 | switch r { 356 | case '-', '+': 357 | state = stateDigitDashTOffset 358 | case 'Z': 359 | state = stateDigitDashTZ 360 | } 361 | case stateDigitDashTZ: 362 | if unicode.IsDigit(r) { 363 | state = stateDigitDashTZDigit 364 | } 365 | case stateDigitDashTOffset: 366 | if r == ':' { 367 | state = stateDigitDashTOffsetColon 368 | } 369 | case stateDigitSlash: // starts digit then slash 02/ 370 | // 2014/07/10 06:55:38.156283 371 | // 03/19/2012 10:11:59 372 | // 04/2/2014 03:00:37 373 | // 3/1/2012 10:11:59 374 | // 4/8/2014 22:05 375 | // 3/1/2014 376 | // 10/13/2014 377 | // 01/02/2006 378 | // 1/2/06 379 | if unicode.IsDigit(r) || r == '/' { 380 | continue 381 | } 382 | switch r { 383 | case ' ': 384 | state = stateDigitSlashWS 385 | } 386 | case stateDigitSlashWS: // starts digit then slash 02/ more digits/slashes then whitespace 387 | // 2014/07/10 06:55:38.156283 388 | // 03/19/2012 10:11:59 389 | // 04/2/2014 03:00:37 390 | // 3/1/2012 10:11:59 391 | // 4/8/2014 22:05 392 | switch r { 393 | case ':': 394 | state = stateDigitSlashWSColon 395 | } 396 | case stateDigitSlashWSColon: // starts digit then slash 02/ more digits/slashes then whitespace 397 | // 2014/07/10 06:55:38.156283 398 | // 03/19/2012 10:11:59 399 | // 04/2/2014 03:00:37 400 | // 3/1/2012 10:11:59 401 | // 4/8/2014 22:05 402 | // 3/1/2012 10:11:59 AM 403 | switch r { 404 | case ':': 405 | state = stateDigitSlashWSColonColon 406 | case 'A', 'P': 407 | state = stateDigitSlashWSColonAMPM 408 | } 409 | case stateDigitSlashWSColonColon: // starts digit then slash 02/ more digits/slashes then whitespace 410 | // 2014/07/10 06:55:38.156283 411 | // 03/19/2012 10:11:59 412 | // 04/2/2014 03:00:37 413 | // 3/1/2012 10:11:59 414 | // 4/8/2014 22:05 415 | // 3/1/2012 10:11:59 AM 416 | switch r { 417 | case 'A', 'P': 418 | state = stateDigitSlashWSColonColonAMPM 419 | } 420 | case stateDigitAlpha: 421 | // 12 Feb 2006, 19:17 422 | // 12 Feb 2006, 19:17:22 423 | switch { 424 | case len(datestr) == len("02 Jan 2006, 15:04"): 425 | return parse("02 Jan 2006, 15:04", datestr, loc) 426 | case len(datestr) == len("02 Jan 2006, 15:04:05"): 427 | return parse("02 Jan 2006, 15:04:05", datestr, loc) 428 | case len(datestr) == len("2006年01月02日"): 429 | return parse("2006年01月02日", datestr, loc) 430 | case len(datestr) == len("2006年01月02日 15:04"): 431 | return parse("2006年01月02日 15:04", datestr, loc) 432 | case strings.Contains(datestr, "ago"): 433 | state = stateHowLongAgo 434 | } 435 | case stateAlpha: // starts alpha 436 | // stateAlphaWS 437 | // Mon Jan _2 15:04:05 2006 438 | // Mon Jan _2 15:04:05 MST 2006 439 | // Mon Jan 02 15:04:05 -0700 2006 440 | // Mon Aug 10 15:44:11 UTC+0100 2015 441 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 442 | // stateAlphaWSDigitComma 443 | // 
May 8, 2009 5:57:51 PM 444 | // 445 | // stateWeekdayComma 446 | // Monday, 02-Jan-06 15:04:05 MST 447 | // stateWeekdayCommaOffset 448 | // Monday, 02 Jan 2006 15:04:05 -0700 449 | // Monday, 02 Jan 2006 15:04:05 +0100 450 | // stateWeekdayAbbrevComma 451 | // Mon, 02-Jan-06 15:04:05 MST 452 | // Mon, 02 Jan 2006 15:04:05 MST 453 | // stateWeekdayAbbrevCommaOffset 454 | // Mon, 02 Jan 2006 15:04:05 -0700 455 | // Thu, 13 Jul 2017 08:58:40 +0100 456 | // stateWeekdayAbbrevCommaOffsetZone 457 | // Tue, 11 Jul 2017 16:28:13 +0200 (CEST) 458 | switch { 459 | case unicode.IsLetter(r): 460 | continue 461 | case r == ' ': 462 | state = stateAlphaWS 463 | case r == ',': 464 | if i == 3 { 465 | state = stateWeekdayAbbrevComma 466 | } else { 467 | state = stateWeekdayComma 468 | } 469 | } 470 | case stateWeekdayComma: // Starts alpha then comma 471 | // Mon, 02-Jan-06 15:04:05 MST 472 | // Mon, 02 Jan 2006 15:04:05 MST 473 | // stateWeekdayCommaOffset 474 | // Monday, 02 Jan 2006 15:04:05 -0700 475 | // Monday, 02 Jan 2006 15:04:05 +0100 476 | switch { 477 | case r == '-': 478 | if i < 15 { 479 | return parse("Monday, 02-Jan-06 15:04:05 MST", datestr, loc) 480 | } 481 | state = stateWeekdayCommaOffset 482 | case r == '+': 483 | state = stateWeekdayCommaOffset 484 | } 485 | case stateWeekdayAbbrevComma: // Starts alpha then comma 486 | // Mon, 02-Jan-06 15:04:05 MST 487 | // Mon, 02 Jan 2006 15:04:05 MST 488 | // stateWeekdayAbbrevCommaOffset 489 | // Mon, 02 Jan 2006 15:04:05 -0700 490 | // Thu, 13 Jul 2017 08:58:40 +0100 491 | // stateWeekdayAbbrevCommaOffsetZone 492 | // Tue, 11 Jul 2017 16:28:13 +0200 (CEST) 493 | switch { 494 | case r == '-': 495 | if i < 15 { 496 | return parse("Mon, 02-Jan-06 15:04:05 MST", datestr, loc) 497 | } 498 | state = stateWeekdayAbbrevCommaOffset 499 | case r == '+': 500 | state = stateWeekdayAbbrevCommaOffset 501 | } 502 | 503 | case stateWeekdayAbbrevCommaOffset: 504 | // stateWeekdayAbbrevCommaOffset 505 | // Mon, 02 Jan 2006 15:04:05 -0700 506 | // Thu, 13 Jul 2017 08:58:40 +0100 507 | // stateWeekdayAbbrevCommaOffsetZone 508 | // Tue, 11 Jul 2017 16:28:13 +0200 (CEST) 509 | if r == '(' { 510 | state = stateWeekdayAbbrevCommaOffsetZone 511 | } 512 | 513 | case stateAlphaWS: // Starts alpha then whitespace 514 | // Mon Jan _2 15:04:05 2006 515 | // Mon Jan _2 15:04:05 MST 2006 516 | // Mon Jan 02 15:04:05 -0700 2006 517 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 518 | // Mon Aug 10 15:44:11 UTC+0100 2015 519 | switch { 520 | case unicode.IsLetter(r): 521 | state = stateAlphaWSAlpha 522 | case unicode.IsDigit(r): 523 | state = stateAlphaWSDigitComma 524 | } 525 | 526 | case stateAlphaWSDigitComma: // Starts Alpha, whitespace, digit, comma 527 | // May 8, 2009 5:57:51 PM 528 | // May 8, 2009 529 | if len(datestr) == len("May 8, 2009") { 530 | return parse("Jan 2, 2006", datestr, loc) 531 | } 532 | return parse("Jan 2, 2006 3:04:05 PM", datestr, loc) 533 | 534 | case stateAlphaWSAlpha: // Alpha, whitespace, alpha 535 | // Mon Jan _2 15:04:05 2006 536 | // Mon Jan 02 15:04:05 -0700 2006 537 | // Mon Jan _2 15:04:05 MST 2006 538 | // Mon Aug 10 15:44:11 UTC+0100 2015 539 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 540 | if r == ':' { 541 | state = stateAlphaWSAlphaColon 542 | } 543 | case stateAlphaWSAlphaColon: // Alpha, whitespace, alpha, : 544 | // Mon Jan _2 15:04:05 2006 545 | // Mon Jan 02 15:04:05 -0700 2006 546 | // Mon Jan _2 15:04:05 MST 2006 547 | // Mon Aug 10 15:44:11 UTC+0100 2015 548 | // Fri Jul 03 2015 18:04:07 GMT+0100 
(GMT Daylight Time) 549 | if unicode.IsLetter(r) { 550 | state = stateAlphaWSAlphaColonAlpha 551 | } else if r == '-' || r == '+' { 552 | state = stateAlphaWSAlphaColonOffset 553 | } 554 | case stateAlphaWSAlphaColonAlpha: // Alpha, whitespace, alpha, :, alpha 555 | // Mon Jan _2 15:04:05 MST 2006 556 | // Mon Aug 10 15:44:11 UTC+0100 2015 557 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 558 | if r == '+' { 559 | state = stateAlphaWSAlphaColonAlphaOffset 560 | } 561 | case stateAlphaWSAlphaColonAlphaOffset: // Alpha, whitespace, alpha, : , alpha, offset, ? 562 | // Mon Aug 10 15:44:11 UTC+0100 2015 563 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 564 | if unicode.IsLetter(r) { 565 | state = stateAlphaWSAlphaColonAlphaOffsetAlpha 566 | } 567 | default: 568 | break iterRunes 569 | } 570 | } 571 | 572 | switch state { 573 | case stateDigit: 574 | // unixy timestamps ish 575 | // 1499979655583057426 nanoseconds 576 | // 1499979795437000 micro-seconds 577 | // 1499979795437 milliseconds 578 | // 1384216367189 579 | // 1332151919 seconds 580 | // 20140601 yyyymmdd 581 | // 2014 yyyy 582 | t := time.Time{} 583 | if len(datestr) > len("1499979795437000") { 584 | if nanoSecs, err := strconv.ParseInt(datestr, 10, 64); err == nil { 585 | t = time.Unix(0, nanoSecs) 586 | } 587 | } else if len(datestr) > len("1499979795437") { 588 | if microSecs, err := strconv.ParseInt(datestr, 10, 64); err == nil { 589 | t = time.Unix(0, microSecs*1000) 590 | } 591 | } else if len(datestr) > len("1332151919") { 592 | if miliSecs, err := strconv.ParseInt(datestr, 10, 64); err == nil { 593 | t = time.Unix(0, miliSecs*1000*1000) 594 | } 595 | } else if len(datestr) == len("20140601") { 596 | return parse("20060102", datestr, loc) 597 | } else if len(datestr) == len("2014") { 598 | return parse("2006", datestr, loc) 599 | } 600 | if t.IsZero() { 601 | if secs, err := strconv.ParseInt(datestr, 10, 64); err == nil { 602 | if secs < 0 { 603 | // Now, for unix-seconds we aren't going to guess a lot 604 | // nothing before unix-epoch 605 | } else { 606 | t = time.Unix(secs, 0) 607 | } 608 | } 609 | } 610 | if !t.IsZero() { 611 | if loc == nil { 612 | return t, nil 613 | } 614 | return t.In(loc), nil 615 | } 616 | 617 | case stateDigitDash: // starts digit then dash 02- 618 | // 2006-01-02 619 | // 2006-01 620 | if len(datestr) == len("2014-04-26") { 621 | return parse("2006-01-02", datestr, loc) 622 | } else if len(datestr) == len("2014-04") { 623 | return parse("2006-01", datestr, loc) 624 | } 625 | case stateDigitDashAlpha: 626 | // 2013-Feb-03 627 | return parse("2006-Jan-02", datestr, loc) 628 | 629 | case stateDigitDashTOffset: 630 | // 2006-01-02T15:04:05+0000 631 | return parse("2006-01-02T15:04:05-0700", datestr, loc) 632 | 633 | case stateDigitDashTOffsetColon: 634 | // With another +/- time-zone at end 635 | // 2006-01-02T15:04:05.999999999+07:00 636 | // 2006-01-02T15:04:05.999999999-07:00 637 | // 2006-01-02T15:04:05.999999+07:00 638 | // 2006-01-02T15:04:05.999999-07:00 639 | // 2006-01-02T15:04:05.999+07:00 640 | // 2006-01-02T15:04:05.999-07:00 641 | // 2006-01-02T15:04:05+07:00 642 | // 2006-01-02T15:04:05-07:00 643 | return parse("2006-01-02T15:04:05-07:00", datestr, loc) 644 | 645 | case stateDigitDashT: // starts digit then dash 02- then T 646 | // 2006-01-02T15:04:05.999999 647 | // 2006-01-02T15:04:05.999999 648 | return parse("2006-01-02T15:04:05", datestr, loc) 649 | 650 | case stateDigitDashTZDigit: 651 | // With a time-zone at end after Z 652 | // 
2006-01-02T15:04:05.999999999Z07:00 653 | // 2006-01-02T15:04:05Z07:00 654 | // RFC3339 = "2006-01-02T15:04:05Z07:00" 655 | // RFC3339Nano = "2006-01-02T15:04:05.999999999Z07:00" 656 | return time.Time{}, fmt.Errorf("RFC339 Dates may not contain both Z & Offset for %q see https://github.com/golang/go/issues/5294", datestr) 657 | 658 | case stateDigitDashTZ: // starts digit then dash 02- then T Then Z 659 | // 2006-01-02T15:04:05.999999999Z 660 | // 2006-01-02T15:04:05.99999999Z 661 | // 2006-01-02T15:04:05.9999999Z 662 | // 2006-01-02T15:04:05.999999Z 663 | // 2006-01-02T15:04:05.99999Z 664 | // 2006-01-02T15:04:05.9999Z 665 | // 2006-01-02T15:04:05.999Z 666 | // 2006-01-02T15:04:05.99Z 667 | // 2009-08-12T22:15Z -- No seconds/milliseconds 668 | switch len(datestr) { 669 | case len("2009-08-12T22:15Z"): 670 | return parse("2006-01-02T15:04Z", datestr, loc) 671 | default: 672 | return parse("2006-01-02T15:04:05Z", datestr, loc) 673 | } 674 | case stateDigitDashWs: // starts digit then dash 02- then whitespace 1 << 2 << 5 + 3 675 | // 2013-04-01 22:43:22 676 | return parse("2006-01-02 15:04:05", datestr, loc) 677 | 678 | case stateDigitDashWsWsOffset: 679 | // 2006-01-02 15:04:05 -0700 680 | return parse("2006-01-02 15:04:05 -0700", datestr, loc) 681 | 682 | case stateDigitDashWsWsOffsetColon: 683 | // 2006-01-02 15:04:05 -07:00 684 | return parse("2006-01-02 15:04:05 -07:00", datestr, loc) 685 | 686 | case stateDigitDashWsWsOffsetAlpha: 687 | // 2015-02-18 00:12:00 +0000 UTC 688 | t, err := parse("2006-01-02 15:04:05 -0700 UTC", datestr, loc) 689 | if err == nil { 690 | return t, nil 691 | } 692 | return parse("2006-01-02 15:04:05 +0000 GMT", datestr, loc) 693 | 694 | case stateDigitDashWsWsOffsetColonAlpha: 695 | // 2015-02-18 00:12:00 +00:00 UTC 696 | return parse("2006-01-02 15:04:05 -07:00 UTC", datestr, loc) 697 | 698 | case stateDigitDashWsOffset: 699 | // 2017-07-19 03:21:51+00:00 700 | return parse("2006-01-02 15:04:05-07:00", datestr, loc) 701 | 702 | case stateDigitDashWsWsAlpha: 703 | // 2014-12-16 06:20:00 UTC 704 | t, err := parse("2006-01-02 15:04:05 UTC", datestr, loc) 705 | if err == nil { 706 | return t, nil 707 | } 708 | t, err = parse("2006-01-02 15:04:05 GMT", datestr, loc) 709 | if err == nil { 710 | return t, nil 711 | } 712 | if len(datestr) > len("2006-01-02 03:04:05") { 713 | t, err = parse("2006-01-02 03:04:05", datestr[:len("2006-01-02 03:04:05")], loc) 714 | if err == nil { 715 | return t, nil 716 | } 717 | } 718 | 719 | case stateDigitDashWsPeriod: 720 | // 2012-08-03 18:31:59.257000000 721 | // 2014-04-26 17:24:37.3186369 722 | // 2017-01-27 00:07:31.945167 723 | // 2016-03-14 00:00:00.000 724 | return parse("2006-01-02 15:04:05", datestr, loc) 725 | 726 | case stateDigitDashWsPeriodAlpha: 727 | // 2012-08-03 18:31:59.257000000 UTC 728 | // 2014-04-26 17:24:37.3186369 UTC 729 | // 2017-01-27 00:07:31.945167 UTC 730 | // 2016-03-14 00:00:00.000 UTC 731 | return parse("2006-01-02 15:04:05 UTC", datestr, loc) 732 | 733 | case stateDigitDashWsPeriodOffset: 734 | // 2012-08-03 18:31:59.257000000 +0000 735 | // 2014-04-26 17:24:37.3186369 +0000 736 | // 2017-01-27 00:07:31.945167 +0000 737 | // 2016-03-14 00:00:00.000 +0000 738 | return parse("2006-01-02 15:04:05 -0700", datestr, loc) 739 | 740 | case stateDigitDashWsPeriodOffsetAlpha: 741 | // 2012-08-03 18:31:59.257000000 +0000 UTC 742 | // 2014-04-26 17:24:37.3186369 +0000 UTC 743 | // 2017-01-27 00:07:31.945167 +0000 UTC 744 | // 2016-03-14 00:00:00.000 +0000 UTC 745 | return parse("2006-01-02 15:04:05 -0700 
UTC", datestr, loc) 746 | 747 | case stateAlphaWSAlphaColon: 748 | // Mon Jan _2 15:04:05 2006 749 | return parse(time.ANSIC, datestr, loc) 750 | 751 | case stateAlphaWSAlphaColonOffset: 752 | // Mon Jan 02 15:04:05 -0700 2006 753 | return parse(time.RubyDate, datestr, loc) 754 | 755 | case stateAlphaWSAlphaColonAlpha: 756 | // Mon Jan _2 15:04:05 MST 2006 757 | return parse(time.UnixDate, datestr, loc) 758 | 759 | case stateAlphaWSAlphaColonAlphaOffset: 760 | // Mon Aug 10 15:44:11 UTC+0100 2015 761 | return parse("Mon Jan 02 15:04:05 MST-0700 2006", datestr, loc) 762 | 763 | case stateAlphaWSAlphaColonAlphaOffsetAlpha: 764 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 765 | if len(datestr) > len("Mon Jan 02 2006 15:04:05 MST-0700") { 766 | // What effing time stamp is this? 767 | // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) 768 | dateTmp := datestr[:33] 769 | return parse("Mon Jan 02 2006 15:04:05 MST-0700", dateTmp, loc) 770 | } 771 | case stateDigitSlash: // starts digit then slash 02/ (but nothing else) 772 | // 3/1/2014 773 | // 10/13/2014 774 | // 01/02/2006 775 | // 2014/10/13 776 | if firstSlash == 4 { 777 | if len(datestr) == len("2006/01/02") { 778 | return parse("2006/01/02", datestr, loc) 779 | } 780 | return parse("2006/1/2", datestr, loc) 781 | } 782 | for _, parseFormat := range shortDates { 783 | if t, err := parse(parseFormat, datestr, loc); err == nil { 784 | return t, nil 785 | } 786 | } 787 | 788 | case stateDigitSlashWSColon: // starts digit then slash 02/ more digits/slashes then whitespace 789 | // 4/8/2014 22:05 790 | // 04/08/2014 22:05 791 | // 2014/4/8 22:05 792 | // 2014/04/08 22:05 793 | 794 | if firstSlash == 4 { 795 | for _, layout := range []string{"2006/01/02 15:04", "2006/1/2 15:04", "2006/01/2 15:04", "2006/1/02 15:04"} { 796 | if t, err := parse(layout, datestr, loc); err == nil { 797 | return t, nil 798 | } 799 | } 800 | } else { 801 | for _, layout := range []string{"01/02/2006 15:04", "01/2/2006 15:04", "1/02/2006 15:04", "1/2/2006 15:04"} { 802 | if t, err := parse(layout, datestr, loc); err == nil { 803 | return t, nil 804 | } 805 | } 806 | } 807 | 808 | case stateDigitSlashWSColonAMPM: // starts digit then slash 02/ more digits/slashes then whitespace 809 | // 4/8/2014 22:05 PM 810 | // 04/08/2014 22:05 PM 811 | // 04/08/2014 1:05 PM 812 | // 2014/4/8 22:05 PM 813 | // 2014/04/08 22:05 PM 814 | 815 | if firstSlash == 4 { 816 | for _, layout := range []string{"2006/01/02 03:04 PM", "2006/01/2 03:04 PM", "2006/1/02 03:04 PM", "2006/1/2 03:04 PM", 817 | "2006/01/02 3:04 PM", "2006/01/2 3:04 PM", "2006/1/02 3:04 PM", "2006/1/2 3:04 PM"} { 818 | if t, err := parse(layout, datestr, loc); err == nil { 819 | return t, nil 820 | } 821 | } 822 | } else { 823 | for _, layout := range []string{"01/02/2006 03:04 PM", "01/2/2006 03:04 PM", "1/02/2006 03:04 PM", "1/2/2006 03:04 PM", 824 | "01/02/2006 3:04 PM", "01/2/2006 3:04 PM", "1/02/2006 3:04 PM", "1/2/2006 3:04 PM"} { 825 | if t, err := parse(layout, datestr, loc); err == nil { 826 | return t, nil 827 | } 828 | 829 | } 830 | } 831 | 832 | case stateDigitSlashWSColonColon: // starts digit then slash 02/ more digits/slashes then whitespace double colons 833 | // 2014/07/10 06:55:38.156283 834 | // 03/19/2012 10:11:59 835 | // 3/1/2012 10:11:59 836 | // 03/1/2012 10:11:59 837 | // 3/01/2012 10:11:59 838 | if firstSlash == 4 { 839 | for _, layout := range []string{"2006/01/02 15:04:05", "2006/1/02 15:04:05", "2006/01/2 15:04:05", "2006/1/2 15:04:05"} { 840 | if t, err := parse(layout, 
datestr, loc); err == nil { 841 | return t, nil 842 | } 843 | } 844 | } else { 845 | for _, layout := range []string{"01/02/2006 15:04:05", "1/02/2006 15:04:05", "01/2/2006 15:04:05", "1/2/2006 15:04:05"} { 846 | if t, err := parse(layout, datestr, loc); err == nil { 847 | return t, nil 848 | } 849 | } 850 | } 851 | 852 | case stateDigitSlashWSColonColonAMPM: // starts digit then slash 02/ more digits/slashes then whitespace double colons 853 | // 2014/07/10 06:55:38.156283 PM 854 | // 03/19/2012 10:11:59 PM 855 | // 3/1/2012 10:11:59 PM 856 | // 03/1/2012 10:11:59 PM 857 | // 3/01/2012 10:11:59 PM 858 | 859 | if firstSlash == 4 { 860 | for _, layout := range []string{"2006/01/02 03:04:05 PM", "2006/1/02 03:04:05 PM", "2006/01/2 03:04:05 PM", "2006/1/2 03:04:05 PM", 861 | "2006/01/02 3:04:05 PM", "2006/1/02 3:04:05 PM", "2006/01/2 3:04:05 PM", "2006/1/2 3:04:05 PM"} { 862 | if t, err := parse(layout, datestr, loc); err == nil { 863 | return t, nil 864 | } 865 | } 866 | } else { 867 | for _, layout := range []string{"01/02/2006 03:04:05 PM", "1/02/2006 03:04:05 PM", "01/2/2006 03:04:05 PM", "1/2/2006 03:04:05 PM"} { 868 | if t, err := parse(layout, datestr, loc); err == nil { 869 | return t, nil 870 | } 871 | } 872 | } 873 | 874 | case stateWeekdayCommaOffset: 875 | // Monday, 02 Jan 2006 15:04:05 -0700 876 | // Monday, 02 Jan 2006 15:04:05 +0100 877 | return parse("Monday, 02 Jan 2006 15:04:05 -0700", datestr, loc) 878 | case stateWeekdayAbbrevComma: // Starts alpha then comma 879 | // Mon, 02-Jan-06 15:04:05 MST 880 | // Mon, 02 Jan 2006 15:04:05 MST 881 | return parse("Mon, 02 Jan 2006 15:04:05 MST", datestr, loc) 882 | case stateWeekdayAbbrevCommaOffset: 883 | // Mon, 02 Jan 2006 15:04:05 -0700 884 | // Thu, 13 Jul 2017 08:58:40 +0100 885 | // RFC1123Z = "Mon, 02 Jan 2006 15:04:05 -0700" // RFC1123 with numeric zone 886 | return parse("Mon, 02 Jan 2006 15:04:05 -0700", datestr, loc) 887 | case stateWeekdayAbbrevCommaOffsetZone: 888 | // Tue, 11 Jul 2017 16:28:13 +0200 (CEST) 889 | return parse("Mon, 02 Jan 2006 15:04:05 -0700 (CEST)", datestr, loc) 890 | case stateHowLongAgo: 891 | // 1 minutes ago 892 | // 1 hours ago 893 | // 1 day ago 894 | switch len(datestr) { 895 | case len("1 minutes ago"), len("10 minutes ago"), len("100 minutes ago"): 896 | return agoTime(datestr, time.Minute) 897 | case len("1 hours ago"), len("10 hours ago"): 898 | return agoTime(datestr, time.Hour) 899 | case len("1 day ago"), len("10 day ago"): 900 | return agoTime(datestr, Day) 901 | } 902 | } 903 | 904 | return time.Time{}, fmt.Errorf("Could not find date format for %s", datestr) 905 | } 906 | 907 | func agoTime(datestr string, d time.Duration) (time.Time, error) { 908 | dstrs := strings.Split(datestr, " ") 909 | m, err := strconv.Atoi(dstrs[0]) 910 | if err != nil { 911 | return time.Time{}, err 912 | } 913 | return time.Now().Add(-d * time.Duration(m)), nil 914 | } 915 | --------------------------------------------------------------------------------
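The exported API in parseany.go (ParseAny, ParseIn, ParseLocal, MustParse) is documented above, but the dump carries no end-to-end usage snippet, so the following is a minimal sketch of how the package might be called. It assumes the fork's import path github.com/noaway/dateparse and borrows its sample date strings from the format comments in parseany.go; adjust both as needed.

package main

import (
	"fmt"
	"time"

	"github.com/noaway/dateparse" // assumed import path for this fork
)

func main() {
	// ParseAny detects the layout automatically; timezone rules match time.Parse.
	t, err := dateparse.ParseAny("2014-12-16 06:20:00 UTC")
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Println("ParseAny:   ", t)

	// ParseIn applies the given Location's rules when the string carries a
	// zone name or offset, mirroring time.ParseInLocation.
	if denver, err := time.LoadLocation("America/Denver"); err == nil {
		t2, _ := dateparse.ParseIn("2017-07-19 03:21:51+00:00", denver)
		fmt.Println("ParseIn:    ", t2)
	}

	// ParseLocal is ParseIn with the global time.Local location.
	t3, _ := dateparse.ParseLocal("3/1/2014")
	fmt.Println("ParseLocal: ", t3)

	// MustParse panics on failure; the package recommends it mainly for tests.
	fmt.Println("MustParse:  ", dateparse.MustParse("10 minutes ago"))
}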