├── requirements.txt ├── .gitignore ├── LICENSE ├── README.md ├── influx_test.py ├── mysql2influx.py └── time_utils.py /requirements.txt: -------------------------------------------------------------------------------- 1 | influxdb==2.10.0 2 | MySQL-python==1.2.5 3 | python-dateutil==2.4.2 4 | pytz==2015.7 5 | requests==2.9.1 6 | six==1.10.0 7 | wheel==0.24.0 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | *.ini 26 | 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *,cover 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | 55 | # Sphinx documentation 56 | docs/_build/ 57 | 58 | # PyBuilder 59 | target/ 60 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 Great Lakes Energy 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Mysql-to-influxdb 2 | Quick hack to get data from MySQL into influx db to display with Graphite and Grafana. This is a quick script I created for a very specific use case so it won't copy your whole database over magically. If you need this you can modify the script to fit your needs. 3 | 4 | This script is meant to run constantly or one time and send data from a single table in MYSQL to influx. If you have multiple tables this will be an issue. Also it requires that the MYSQL table is altered to have a specific column which indicates whether this piece of data has been copied to influx already or not. 5 | 6 | # Config 7 | This script needs a config file to run; simply create a config file. Below is a sample config file for settings.ini 8 | 9 | 10 | [mysql] 11 | host : mysql_server_hostname 12 | port : mysql_server_port # Default is 3306 13 | username : mysql_user_name 14 | password : mysql_user_password 15 | db : mysql_database 16 | table : mysql_table 17 | check_field : column_to_check_if_data_has_been_transferred 18 | time_field : column_that_contains_timestamp_in_table 19 | siteid_field : column_which_contains_site_id_tag 20 | 21 | 22 | [influx] 23 | host : localhost 24 | port : 8086 25 | username : influx_username 26 | password : influx_pass 27 | db : sesh 28 | 29 | [server] 30 | interval : 5 31 | 32 | [site_info] 33 | site_name : tag_name_to_append_to_all_data_going_into_influx 34 | # Usage 35 | first run 36 | ```pip install -r requirements.txt``` 37 | then 38 | ```python mysql2influx.py -d -c settings.ini -s``` 39 | 40 | This will run the script as a server in debug mode 41 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /influx_test.py: -------------------------------------------------------------------------------- 1 | import
argparse 2 | 3 | from influxdb import InfluxDBClient 4 | 5 | 6 | def main(host='localhost', port=8086): 7 | user = 'root' 8 | password = 'gle12345' 9 | dbname = 'example' 10 | dbuser = 'sesh' 11 | dbuser_password = 'my_secret_password' 12 | query = 'select value from cpu_load_short;' 13 | """ 14 | json_body = [ 15 | { 16 | "measurement": "cpu_load_short", 17 | "tags": { 18 | "host": "server01", 19 | "region": "us-west" 20 | }, 21 | "time": "2009-11-10T23:00:00Z", 22 | "fields": { 23 | "value": 0.64 24 | } 25 | } 26 | ] 27 | """ 28 | """ 29 | json_body = [ 30 | {'fields': { 31 | 'value': 0.0 32 | }, 33 | 'tags': { 34 | 'source': 'wago', 35 | 'site_name': 'Nyange' 36 | }, 37 | 'time': '2015-11-05T07:39:41Z', 38 | 'measurement': 'rTotalApparentPower'} 39 | ] 40 | """ 41 | json_body = [{'fields': {'value': 0.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'rTotalActiveEnergy'}, {'fields': {'value': 0.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'rTotalReactiveEnergy'}, {'fields': {'value': 0.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'DCPower2'}, {'fields': {'value': 2.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'DCPower1'}, {'fields': {'value': 9.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'DCVolt1'}, {'fields': {'value': 0.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'arVoltage_L_N2'}, {'fields': {'value': 9.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'arVoltage_L_N1'}, {'fields': {'value': 0.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'DCVolt2'}, {'fields': {'value': 0.0}, 'tags': {'source': 'wago', 'site_name': 
'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'arVoltage_L_N3'}, {'fields': {'value': 0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'xRotatingField'}, {'fields': {'value': 0.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'rTotalApparentEnergy'}, {'fields': {'value': 0.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'arFrequency3'}, {'fields': {'value': 0.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'arFrequency2'}, {'fields': {'value': 0.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'arFrequency1'}, {'fields': {'value': datetime.datetime(2015, 11, 5, 7, 39, 41)}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'timestamp'}, {'fields': {'value': 3.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'rTotalActivePower'}, {'fields': {'value': 0.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'arCurrent2'}, {'fields': {'value': 0.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'arCurrent3'}, {'fields': {'value': 9.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'arCurrent1'}, {'fields': {'value': 1000L}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'iSiteCode'}, {'fields': {'value': 3.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'rTotalReactivePower'}, {'fields': {'value': 0L}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'trans'}, {'fields': {'value': 0.0}, 'tags': 
{'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'DCCurr2'}, {'fields': {'value': 10.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'DCCurr1'}, {'fields': {'value': 0.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'rTotalPowerFactorPF'}, {'fields': {'value': 0.0}, 'tags': {'source': 'wago', 'site_name': 'Nyange'}, 'time': '2015-11-05T07:39:41Z', 'measurement': 'rTotalApparentPower'}] 42 | 43 | 44 | client = InfluxDBClient(host, port, user, password, dbname) 45 | 46 | print("Create database: " + dbname) 47 | client.create_database(dbname) 48 | 49 | print("Create a retention policy") 50 | client.create_retention_policy('awesome_policy', '3d', 3, default=True) 51 | 52 | print("Switch user: " + dbuser) 53 | client.switch_user(dbuser, dbuser_password) 54 | 55 | print("Write points: {0}".format(json_body)) 56 | client.write_points(json_body) 57 | 58 | print("Queying data: " + query) 59 | result = client.query(query) 60 | 61 | print("Result: {0}".format(result)) 62 | 63 | print("Switch user: " + user) 64 | client.switch_user(user, password) 65 | 66 | print("Drop database: " + dbname) 67 | client.drop_database(dbname) 68 | 69 | 70 | def parse_args(): 71 | parser = argparse.ArgumentParser( 72 | description='example code to play with InfluxDB') 73 | parser.add_argument('--host', type=str, required=False, default='localhost', 74 | help='hostname of InfluxDB http API') 75 | parser.add_argument('--port', type=int, required=False, default=8086, 76 | help='port of InfluxDB http API') 77 | return parser.parse_args() 78 | 79 | 80 | if __name__ == '__main__': 81 | args = parse_args() 82 | main(host=args.host, port=args.port) 83 | -------------------------------------------------------------------------------- /mysql2influx.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 
| import logging 4 | import os 5 | import argparse 6 | import MySQLdb 7 | import MySQLdb.cursors 8 | import time 9 | 10 | from ConfigParser import RawConfigParser 11 | from influxdb import InfluxDBClient 12 | from time_utils import get_epoch_from_datetime 13 | from datetime import datetime 14 | logger = logging.getLogger(__name__) 15 | 16 | 17 | class Mysql2Influx: 18 | 19 | def __init__(self,config): 20 | 21 | #TODO put site info into settings file 22 | self._site_name = config.get('site_info','site_name') 23 | self._table = config.get('mysql','table') 24 | self._siteid_field = config.get('mysql','siteid_field') 25 | 26 | if config.has_option('mysql','time_field'): 27 | self._time_field = config.get('mysql','time_field') 28 | else: 29 | self._time_field = 'timestamp' 30 | #intitialise client for mysql database 31 | self._mysql_host = config.get('mysql','host') 32 | self._mysql_username = config.get('mysql','username') 33 | self._mysql_password = config.get('mysql','password') 34 | self._mysql_db = config.get('mysql','db') 35 | 36 | self._influx_db_host = config.get('influx','host') 37 | self._influx_db_port = config.get('influx','port') 38 | self._influx_db_username = config.get('influx','username') 39 | self._influx_db_password = config.get('influx','password') 40 | self._influx_db = config.get('influx','db') 41 | 42 | self._complete = False 43 | self._check_field = config.get('mysql','check_field') 44 | 45 | self.initialise_database() 46 | 47 | 48 | def initialise_database(self): 49 | self._db_client = MySQLdb.connect ( self._mysql_host, 50 | self._mysql_username, 51 | self._mysql_password, 52 | self._mysql_db, 53 | cursorclass = MySQLdb.cursors.DictCursor 54 | ) 55 | 56 | self._influx_client = InfluxDBClient( 57 | self._influx_db_host, 58 | self._influx_db_port, 59 | self._influx_db_username, 60 | self._influx_db_password, 61 | self._influx_db 62 | ) 63 | 64 | 65 | 66 | def transfer_data(self): 67 | self._get_data_from_mysql() 68 | 69 | self._update_rows() 70 | 
71 | logger.debug('All data transfer completed : %s '% self._complete) 72 | 73 | 74 | def _purge_data_in_db(self): 75 | """ 76 | Once the data is configured and within influx we can pruge our database 77 | """ 78 | if self._complete: 79 | query = "SELECT * FROM TABLE %s WHERE %s = 0 ORDER BY %s DESC"%(self._table, self._check_fields,self._time_field) 80 | 81 | 82 | def _get_data_from_mysql(self): 83 | """ 84 | get the cursor to dump all the data from mysql 85 | """ 86 | query = "SELECT * FROM `%s` WHERE `%s`=0 ORDER BY %s DESC"%(self._table,self._check_field,self._time_field) 87 | 88 | logger.debug('executing query %s '% query) 89 | cursor = self._db_client.cursor() 90 | cursor.execute(query) 91 | 92 | # pull data from mysql in X increments 93 | rows = cursor.fetchall() 94 | logger.info('querying MYSQL got %s rows'%len(rows)) 95 | 96 | self._format_data(rows) 97 | 98 | 99 | def _send_data_to_influx(self,data_point): 100 | """ 101 | Break up data to make sure in the format the inflxu like 102 | """ 103 | logger.debug('Sending data to influx %s ...'%data_point[0]) 104 | self._influx_client.write_points(data_point) 105 | 106 | 107 | def _format_data(self,data): 108 | self._complete = False 109 | #turn time into epoch timesa 110 | if data: 111 | logger.debug('Got data from mysql') 112 | for row in data: 113 | data_list =[] 114 | for key in row.keys(): 115 | #format date to epoch 116 | epoch_time = row[self._time_field].isoformat() 117 | if not isinstance(row[key],datetime): 118 | data_point = {"measurement":key, 119 | "tags":{"site_name":row[self._siteid_field], 120 | "source": "wago"}, 121 | "time" : "%sZ"%epoch_time, 122 | "fields" : {"value":row[key]} 123 | } 124 | 125 | data_list.append(data_point) 126 | logger.debug("data_point = %s"%data_point) 127 | self._send_data_to_influx(data_list) 128 | self._complete = True 129 | 130 | def _update_rows(self): 131 | query = 'UPDATE %s SET %s=1 WHERE %s=0;'%(self._table,self._check_field,self._check_field) 132 | if 
self._complete: 133 | logger.debug('Updating rows : executing query %s '% query) 134 | c = self._db_client.cursor() 135 | c.execute(query) 136 | self._db_client.commit() 137 | def main(): 138 | #Argument parsing 139 | parser = argparse.ArgumentParser(description = 'Get Time series data from MYSQL and push it to influxdb' ) 140 | 141 | parser.add_argument( '-d', '--debug', help = 'set logging level to debug', action = 'store_true') 142 | parser.add_argument( '-c', '--config', help = 'config file location', nargs = 1, default = 'settings.ini' ) 143 | parser.add_argument( '-s', '--server', help = 'run as server with interval ',action = 'store_true' ) 144 | 145 | args = parser.parse_args() 146 | 147 | 148 | # Init logging 149 | logging.basicConfig(level=(logging.DEBUG if True or args.debug else logging.INFO)) 150 | 151 | logger.debug('Starting up with config file %s' % (args.config)) 152 | #get config file 153 | config = RawConfigParser() 154 | config.read(args.config) 155 | 156 | _sleep_time = float(config.get('server','interval')) 157 | 158 | logger.debug('configs %s' % (config.sections())) 159 | #start 160 | mclient = Mysql2Influx(config) 161 | if not args.server: 162 | mclient.transfer_data() 163 | else: 164 | logger.info('Starting up server mode interval: %s' % _sleep_time) 165 | while True: 166 | try: 167 | mclient.transfer_data() 168 | except Exception,e: 169 | logger.exception("Error occured will try again") 170 | time.sleep(_sleep_time) 171 | mclient.initialise_database() 172 | 173 | if __name__ == '__main__': 174 | #Check our config file 175 | main() 176 | -------------------------------------------------------------------------------- /time_utils.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime,timedelta 2 | from dateutil.parser import parse 3 | from time import localtime,strftime 4 | from django.conf import settings 5 | from pytz import timezone 6 | from django.utils import timezone as 
timezone_dt 7 | from tzwhere import tzwhere 8 | 9 | 10 | def get_epoch(tz=None): 11 | """ 12 | Return number of seconds since 1970-01-01- epoch 13 | """ 14 | now = datetime.now() 15 | if tz: 16 | tz = _clean_tz(tz) 17 | now = timezone_dt.now(tz) 18 | epoch = datetime(1970,1,1) 19 | diff = now - epoch 20 | diff = str(diff.total_seconds()) 21 | diff_seconds = diff.split('.') 22 | diff_int = diff_seconds[0] 23 | return diff_int 24 | 25 | def get_yesterday(tz=None): 26 | 27 | now = datetime.date(datetime.now()) 28 | if tz: 29 | tz = _clean_tz(tz) 30 | now = timezone_dt.now(tz) 31 | 32 | one_day = timedelta(days=1) 33 | return now - one_day 34 | 35 | 36 | def get_time_interval_array(interval,interval_type,start,end,tz=None): 37 | """ 38 | Get an array between the provided time frame 39 | with slices in the given interval timedelta 40 | between start and end 41 | """ 42 | 43 | if tz: 44 | tz = _clean_tz(tz) 45 | timez = timezone(tz) 46 | else: 47 | timez = timezone(settings.TIME_ZONE) 48 | # TODO finish timezone implemenation 49 | result = [] 50 | kwargs = {} 51 | kwargs[interval_type] = interval 52 | interval = timedelta(**kwargs) 53 | while start < end: 54 | start = start + interval 55 | result.append(start) 56 | 57 | return result 58 | 59 | def get_epoch_from_datetime(date): 60 | """ 61 | Return number of seconds since 1970-01-01- epoch 62 | from given date 63 | @params: dateobject 64 | """ 65 | epoch = datetime(1970,1,1) 66 | if date.tzinfo: 67 | epoch = datetime(1970,1,1,tzinfo=date.tzinfo) 68 | diff = date - epoch 69 | seconds_only = str(diff.total_seconds()) 70 | seconds_only_str = seconds_only.split('.') 71 | seconds_only = seconds_only_str[0] 72 | return seconds_only 73 | 74 | def get_epoch_from_date(year, month, day, hours, minutes, tz=None): 75 | """ 76 | Return number of seconds since 1970-01-01- epoch 77 | from given date 78 | @params: month,day,year,hours,minutes 79 | """ 80 | date = datetime(year,month,day,hours,minutes) 81 | if tz: 82 | tz = _clean_tz(tz) 
83 | date = datetime(year,month,day,hours,minutes,tzinfo=tz) 84 | 85 | epoch = datetime(1970,1,1) 86 | diff = date - epoch 87 | return diff.total_seconds() 88 | 89 | def localize(nv_datetime,tz): 90 | """ 91 | Add timezone info to datetime object 92 | """ 93 | if not isinstance(nv_datetime,datetime): 94 | try: 95 | # Try to convert to datetime 96 | time = datetime(nv_datetime) 97 | except Exception, e: 98 | return nv_datetime 99 | # Is our datetime object naive? 100 | if nv_datetime.tzinfo is not None and nv_datetime.tzinfo.utcoffset(nv_datetime) is not None: 101 | return nv_datetime 102 | localtz = timezone(tz) 103 | dt_aware = localtz.localize(nv_datetime) 104 | return dt_aware 105 | 106 | 107 | def epoch_to_date(seconds_time, tz=None): 108 | """ 109 | Translate seconds time to date 110 | """ 111 | time = strftime('%Y-%m-%d',localtime(seconds_time)) 112 | if tz: 113 | tz = _clean_tz(tz) 114 | time = localize(time,tz) 115 | 116 | return time 117 | 118 | def get_timezone_from_geo(lat, lon): 119 | """ 120 | Return String TimeZone from provided lat,lon 121 | """ 122 | tz = tzwhere.tzwhere() 123 | return tz.tzNameAt(float(lat), float(lon)) 124 | 125 | def epoch_to_datetime(seconds_time, tz=None): 126 | """ 127 | Translate seconds time to datetime object 128 | """ 129 | time = datetime.fromtimestamp(seconds_time) 130 | 131 | if tz: 132 | tz = _clean_tz(tz) 133 | time = localize(time,tz) 134 | 135 | 136 | #return strftime('%Y-%m-%dT%XZ',time) 137 | return time 138 | 139 | 140 | def get_last_five_days(from_date="now", tz=None): 141 | """ 142 | Get last days returned to you as datetime objects in array 143 | @params: 144 | from_date: (date to return consecutive days ongoing from) (optional) 145 | """ 146 | days = [] 147 | 148 | now = datetime.now() 149 | if not from_date == "now": 150 | now = timezone.now() 151 | 152 | if not from_date == "now": 153 | now = from_date 154 | 155 | delta = now - timedelta(5) 156 | for day in xrange(1,6): 157 | days.append(delta) 158 | delta 
= delta + timedelta(1) 159 | return days 160 | 161 | 162 | def get_days_interval_delta(start, end, delta=1): 163 | 164 | """ 165 | return the days inbetween the two sepcified dates 166 | @params: 167 | start: when is the interval stardate, datetime object 168 | end: when should the interval end , datatime object 169 | delta: increments in which to return dates, integer, default 1 day 170 | """ 171 | delta = timedelta(days=delta) 172 | curr = start 173 | days = [] 174 | while curr < end: 175 | days.append(curr) 176 | curr += delta 177 | return days 178 | 179 | def get_start_end_date(days_ago, start_day): 180 | """ 181 | get the delta of days ago from given start date 182 | """ 183 | delta = start_day - timedelta(days=days_ago) 184 | return delta 185 | 186 | def get_timesince_seconds(time, tz=None): 187 | """ 188 | Get timesince and time provided 189 | """ 190 | now = timezone_dt.now() 191 | if tz: 192 | now = timezone(tz).localize(datetime.now()) 193 | 194 | #loc = timezone(settings.TIME_ZONE) 195 | #now = loc.localize(now) 196 | diff = now - time 197 | return int(diff.total_seconds()) 198 | 199 | def get_timesince(time, tz=None): 200 | 201 | now = datetime.now() 202 | if tz: 203 | tz = _clean_tz(tz) 204 | now = timezone_dt.now(tz) 205 | 206 | loc = timezone(settings.TIME_ZONE) 207 | now = loc.localize(now) 208 | diff = now - time 209 | diff = format_timesince_seconds(int(diff.total_seconds())) 210 | return diff 211 | 212 | def format_timesince_seconds(seconds): 213 | seconds = abs(seconds) 214 | if seconds < 60: 215 | if seconds == 1: 216 | return seconds, " second ago" 217 | else: 218 | return seconds, " seconds ago" 219 | elif seconds < 3600: 220 | if seconds/60 < 2: # if it is still a minute 221 | return seconds/60, " minute ago" 222 | else: 223 | return seconds/60, " minutes ago" 224 | elif seconds < 86400: 225 | if seconds/3600 < 2: 226 | return seconds/3600, " hour ago" 227 | else: 228 | return seconds/3600, " hours ago" 229 | 230 | else: 231 | if 
seconds/86400 < 2: 232 | return seconds/86400, " day ago" 233 | else: 234 | return seconds/86400, " days ago" 235 | 236 | 237 | def get_date_dashed(date): 238 | date_string = str(date.year) + '-' + str(date.month) + '-' + str(date.day) 239 | return date_string 240 | 241 | 242 | def convert_influx_time_string(date_string, tz=None): 243 | """ Converts influx style strings to datetime """ 244 | parsed = parse(date_string) 245 | 246 | if tz: 247 | parsed =parsed.astimezone(timezone(tz)) 248 | 249 | return parsed 250 | 251 | def get_timesince_influx(date_string): 252 | date_obj = convert_influx_time_string(date_string) 253 | return get_timesince(date_obj) 254 | 255 | def _clean_tz(tz): 256 | return tz.strip().replace('\'','').replace('\"','') 257 | --------------------------------------------------------------------------------