├── data.csv ├── Author-Note.txt ├── LICENSE ├── .gitignore ├── command.py ├── README.md ├── tlogger.py └── mqtt-topic-logger.py /data.csv: -------------------------------------------------------------------------------- 1 | test/sensor1,time_ms,time,ms,Urms,Umin,Umax 2 | test/sensor2,time_ms,time,ms,Urms,Umin,Umax 3 | test/sensor3,time_ms,time,sensor,count,status 4 | test/sensor4,time_ms,time,ms,Urms,Umin,Umax,count -------------------------------------------------------------------------------- /Author-Note.txt: -------------------------------------------------------------------------------- 1 | This software is free to use and modify as you see fit. 2 | If you would like to support my efforts then you can make a conribution via PayPal using this link. 3 | http://paypal.me/stevesinternetguide 4 | 5 | If you have comments. What to report bugs,suggest improvements or features then either comment on the site. 6 | http://www.steves-internet-guide.com/simple-python-mqtt-data-logger/ 7 | or by email 8 | steve@steves-internet-guide.com 9 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This is free and unencumbered software released into the public domain. 2 | 3 | Anyone is free to copy, modify, publish, use, compile, sell, or 4 | distribute this software, either in source code form or as a compiled 5 | binary, for any purpose, commercial or non-commercial, and by any 6 | means. 7 | 8 | In jurisdictions that recognize copyright laws, the author or authors 9 | of this software dedicate any and all copyright interest in the 10 | software to the public domain. We make this dedication for the benefit 11 | of the public at large and to the detriment of our heirs and 12 | successors. We intend this dedication to be an overt act of 13 | relinquishment in perpetuity of all present and future rights to this 14 | software under copyright law. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 19 | IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR 20 | OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 21 | ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 | OTHER DEALINGS IN THE SOFTWARE. 23 | 24 | For more information, please refer to 25 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | 57 | # Flask stuff: 58 | instance/ 59 | .webassets-cache 60 | 61 | # Scrapy stuff: 62 | .scrapy 63 | 64 | # Sphinx documentation 65 | docs/_build/ 66 | 67 | # PyBuilder 68 | target/ 69 | 70 | # Jupyter Notebook 71 | .ipynb_checkpoints 72 | 73 | # pyenv 74 | .python-version 75 | 76 | # celery beat schedule file 77 | celerybeat-schedule 78 | 79 | # SageMath parsed files 80 | *.sage.py 81 | 82 | # dotenv 83 | .env 84 | 85 | # virtualenv 86 | .venv 87 | venv/ 88 | ENV/ 89 | 90 | # Spyder project settings 91 | .spyderproject 92 | .spyproject 93 | 94 | # Rope project settings 95 | .ropeproject 96 | 97 | # mkdocs documentation 98 | /site 99 | 100 | # mypy 101 | .mypy_cache/ 102 | -------------------------------------------------------------------------------- /command.py: -------------------------------------------------------------------------------- 1 | #!python3 2 | ###demo code provided by Steve Cope at www.steves-internet-guide.com 3 | ##email steve@steves-internet-guide.com 4 | ###Free to use for any purpose 5 | import sys, getopt 6 | options=dict() 7 | 8 | ##EDIT HERE ############### 9 | options["username"]="" 10 | options["password"]="" 11 | options["broker"]="127.0.0.1" 12 | options["port"]=1883 13 | options["verbose"]=True 14 | options["cname"]="" 15 | options["topics"]=[("",0)] 16 | options["storechangesonly"]=True 17 | options["keepalive"]=60 18 | options["loglevel"]="WARNING" 19 | options["log_dir"]="mlogs" 20 | options["log_records"]=10000 21 | options["number_logs"]=0 22 | options["JSON"]=True 23 | options["csv"]=False 24 | options["header_file"]="data.csv" 25 | options["fname"]="data.csv" 26 | options["header_flag"]=False 27 | 28 | def command_input(options={}): 29 | topics_in=[] 30 | qos_in=[] 31 | 32 | valid_options=" --help -h or -b -p -t -q QOS -v -h \ 33 | -d logging debug -n Client ID or Name -u Username -P Password -s \ 34 | -l -r -f \ 35 | -L -j -c " 36 | print_options_flag=False 37 | try: 38 | opts, args = getopt.getopt(sys.argv[1:],"h:b:jcsdk:p:t:q:l:L:vn:u:P:l:r:f:") 39 | except getopt.GetoptError: 40 | print (sys.argv[0],valid_options) 41 | sys.exit(2) 42 | qos=0 43 | 44 | for opt, arg in opts: 45 | if opt == '-h': 46 | options["broker"] = str(arg) 47 | elif opt == "-b": 48 | options["broker"] = str(arg) 49 | elif opt == "-k": 50 | options["keepalive"] = int(arg) 51 | elif opt =="-p": 52 | options["port"] = int(arg) 53 | elif opt =="-t": 54 | topics_in.append(arg) 55 | elif opt =="-q": 56 | qos_in.append(int(arg)) 57 | elif opt =="-n": 58 | options["cname"]=arg 59 | elif opt =="-f": 60 | options["fname"]=arg 61 | options["header_flag"]=True 62 | elif opt =="-d": 63 | options["loglevel"]="DEBUG" 64 | elif opt == "-P": 65 | options["password"] = str(arg) 66 | elif opt == "-u": 67 | options["username"] = str(arg) 68 | elif opt =="-v": 69 | options["verbose"]=True 70 | elif opt =="-s": 71 | options["storechangesonly"]=False 72 | elif opt =="-l": 73 | options["log_dir"]=str(arg) 74 | elif opt =="-r": 75 | options["log_records"]=int(arg) 76 | elif opt =="-L": 77 | options["number_logs"]=int(arg) 78 | elif opt =="-j": 79 | options["JSON"]=True 80 | elif opt =="-c": 81 | options["csv"]=True 82 | 83 | 84 | lqos=len(qos_in) 85 | for i in range(len(topics_in)): 86 | if lqos >i: 87 | topics_in[i]=(topics_in[i],int(qos_in[i])) 88 | else: 89 | topics_in[i]=(topics_in[i],0) 90 | 91 | if topics_in: 92 | options["topics"]=topics_in #array with qos 93 | return options 94 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | Simple Python MQTT Data Logger by Topic 3 | 4 | . 5 | This software monitors a group of topics and creates a log file 6 | for each topic to which this MQTT client has subscribed. 7 | 8 | 9 | You can specify the root log directory when starting defaults to tlogs 10 | 11 | . 12 | 13 | Default log size is 5MB 14 | You need to provide the script with: 15 | 16 | List of topics to monitor 17 | broker name and port 18 | username and password if needed. 19 | base log directory and number of logs have defaults 20 | Valid command line Options: 21 | --help 22 | -h 23 | -b 24 | -p 25 | -t 26 | -q 27 | -v 28 | -d logging debug 29 | -n 30 | -u Username 31 | -P Password 32 | -s \ 33 | -l 34 | -T test mode when use with the data logger tester 35 | -r Record size in bytes default=10000 36 | -c log in csv format 37 | -f -f filename of header file default is data.csv 38 | 39 | Example Usage: 40 | 41 | You will always need to specify the broker name or IP address 42 | and the topics to log 43 | 44 | Note: you may not need to use the python prefix or may 45 | need to use python3 mqtt-topic-logger.py (Linux) 46 | 47 | Specify broker and topics 48 | 49 | python mqtt-topic-logger.py -h 192.168.1.157 -t sensors/# 50 | 51 | Specify broker and multiple topics 52 | 53 | python mqtt-topic-logger.py -h 192.168.1.157 -t sensors/# -t home/# 54 | 55 | 56 | Log All Data: 57 | 58 | python mqtt-topic-logger.py -h 192.168.1.157 -t sensors/# -s 59 | 60 | Specify the client name used by the logger 61 | 62 | python mqtt-topic-logger.py -h 192.168.1.157 -t sensors/# -n data-logger 63 | 64 | Specify the log directory 65 | 66 | python mqtt-topic-logger.py -h 192.168.1.157 -t sensors/# -l mylogs 67 | 68 | Log in CSV format 69 | 70 | python mqtt-topic-logger.py -h 192.168.1.157 -t sensors/# -c 71 | 72 | Log in CSV format and use data.csv header file 73 | 74 | python mqtt-topic-logger.py -h 192.168.1.157 -t sensors/# -c -f data.csv 75 | 76 | 77 | --------- 78 | Logger Class 79 | 80 | The class is implemented in a module called tlogger.py (topic logger). 81 | 82 | To create an instance you ca supply two parameters: 83 | 84 | The log directory- defaults to tlogs 85 | Max Log Size defaults to 5MB 86 | 87 | 88 | log=tlogger.T_logger(log_dir) 89 | 90 | The logger creates the log files in the directory using the topic names for the directory names and log files starting with log000.txt 91 | When the file reaches 5Mb it is rotated 92 | 93 | log data is JSON format with a timestamp added to the message 94 | 95 | 96 | The logger will return True if successful and False if not. 97 | 98 | To prevent loss of data in the case of computer failure the logs are continuously flushed to disk . 99 | 100 | The logger will not clear log files when you start the logger you should ensure the log directory is empty. 101 | When logging to a csv file you can change the default header order using a header file. 102 | Each topic requires its own header entry. Below is an example header file: 103 | 104 | test/sensor1,time_ms,time,ms,Urms,Umin,Umax 105 | test/sensor2,time_ms,time,ms,Urms,Umin,Umax 106 | test/sensor3,time_ms,time,sensor,count,status 107 | test/sensor4,time_ms,time,ms,Urms,Umin,Umax,count 108 | 109 | You can see that topics sensor1 and sensor2 use the same header whereas sensor3 and sensor4 have different headers. 110 | Because topics can have different json foramts the better option is to let the script build the header file rather than supplying one. 111 | -------------------------------------------------------------------------------- /tlogger.py: -------------------------------------------------------------------------------- 1 | import time,os,json,logging,csv 2 | #doesn't use writer 3 | class T_logger(): 4 | def __init__(self,log_dir="tlogs",MAX_LOG_SIZE=5000,csv_flag=False): 5 | self.MAX_LOG_SIZE=MAX_LOG_SIZE 6 | self.log_root_dir=log_dir 7 | self.metafile="metafile.txt" 8 | self.topics={}#key=topic name\ 9 | #data is array[file pointer,directory,counter] 10 | self.create_log_dir(self.log_root_dir) 11 | print("Max log size= ",MAX_LOG_SIZE) 12 | self.columns = ['time','message']#needed as get error when storing json 13 | self.csv_flag = csv_flag 14 | self.header_flag=False 15 | def write_header(self,fo,columns): 16 | self.header_flag=True 17 | try: 18 | fo.write(columns) 19 | fo.write("\n") 20 | except BaseException as e: 21 | logging.error("Error on_data: %s" % str(e)) 22 | return False 23 | self.__flushlogs(fo) 24 | 25 | def set_headers(self,headers): 26 | #not currently used 27 | self.headers = headers 28 | self.header_flag=True 29 | def extract_columns(self,data): 30 | columns="" 31 | 32 | #data=flatten_dict(msg) 33 | for key in data: 34 | #print("key =",key) 35 | if columns =="": 36 | columns=key 37 | else: 38 | columns=columns+","+key 39 | #print(columns) 40 | return(columns) 41 | def extract_data(self,data): 42 | line_out="" 43 | for key in data: 44 | #print("here ",data[key]) 45 | if line_out =="": 46 | line_out=str(data[key]) 47 | else: 48 | line_out=line_out+","+str(data[key]) 49 | #print(line_out) 50 | return(line_out) 51 | 52 | def __flushlogs(self,fo): # write to disk 53 | fo.flush() 54 | os.fsync(fo.fileno()) 55 | 56 | def create_log_dir(self,log_dir): 57 | try: 58 | os.stat(log_dir) 59 | except: 60 | os.mkdir(log_dir) 61 | 62 | def close_file(self): 63 | print("closing files ") 64 | 65 | for key in self.topics: 66 | fo=self.topics[key][0] 67 | if not fo.closed: 68 | fo.close() 69 | def write(self,fo,data): 70 | if self.csv_flag: 71 | data=self.extract_data(data) 72 | try: 73 | fo.write(data) 74 | fo.write("\n") 75 | except BaseException as e: 76 | logging.error("Error on_data: %s" % str(e)) 77 | return False 78 | self.__flushlogs(fo) 79 | def update_topic_counter(topic,count): 80 | pass 81 | 82 | 83 | def create_log_file(self,dir,topic,columns,fo="",count=0): 84 | 85 | log_numbr="{0:003d}".format(count) 86 | logging.info("log number "+str(log_numbr)+ " dir "+dir) 87 | filename= "log"+str(log_numbr)+".txt" 88 | try: 89 | os.stat(filename) 90 | os.remove(filename)#remove old log if exists 91 | except: 92 | pass 93 | filename=dir+"/"+filename 94 | logging.info("Creating log "+str(count)) 95 | 96 | if count==0: 97 | pass 98 | else: 99 | fo.close() #close old log file 100 | 101 | #update_topic_counter(topic,count) 102 | fo=open(filename, 'w') 103 | count+=1 104 | 105 | self.topics[topic]=[fo,dir,filename,count,columns] 106 | return (fo) 107 | 108 | 109 | def log_json(self,data): 110 | topic=data["topic"] #get topic from data 111 | del data["topic"] #no need to store topic 112 | jdata=json.dumps(data) 113 | self.log_data(jdata,topic) 114 | def log_data(self,data,topic=""): 115 | columns=0 #needed as json data causes error 116 | if topic=="": 117 | topic=data["topic"] 118 | del data["topic"] #no need to store topic 119 | 120 | 121 | if topic in self.topics: 122 | 123 | fo=self.topics[topic][0] #retrieve pointer 124 | #writer=self.topics[topic][4] #retrieve pointer 125 | self.write(fo,data) 126 | file=self.topics[topic][2] 127 | #need to create new log file 128 | if os.stat(file).st_size>self.MAX_LOG_SIZE: 129 | dir=self.topics[topic][1] 130 | count=self.topics[topic][3] 131 | fo=self.create_log_file(dir,topic,columns,fo,count) 132 | if self.csv_flag: 133 | columns=self.extract_columns(data) 134 | self.write_header(fo,columns) 135 | self.topics[topic][0]=fo 136 | self.topics[topic][4]=columns 137 | #self.write(fo,data) 138 | else: 139 | #store file name and pointers 140 | 141 | s_topics=topic.split('/') 142 | dir=self.log_root_dir 143 | for t in s_topics: 144 | dir=dir+"/"+t 145 | self.create_log_dir(dir) 146 | fo=self.create_log_file(dir,topic,columns,fo="",count=0) 147 | if self.csv_flag: 148 | columns=self.extract_columns(data) 149 | self.write_header(fo,columns) 150 | self.topics[topic][0]=fo 151 | self.topics[topic][4]=columns 152 | self.write(fo,data) 153 | 154 | 155 | -------------------------------------------------------------------------------- /mqtt-topic-logger.py: -------------------------------------------------------------------------------- 1 | #!c:\python34\python 2 | #!python3 3 | ###demo code provided by Steve Cope at www.steves-internet-guide.com 4 | ##email steve@steves-internet-guide.com 5 | ###Free to use for any purpose 6 | """ 7 | This will log messages to file.Los time,message and topic as JSON data 8 | """ 9 | mqttclient_log=False #MQTT client logs showing messages 10 | Log_worker_flag=True 11 | import paho.mqtt.client as mqtt 12 | import json 13 | import os 14 | import time 15 | import sys, getopt,random 16 | import logging 17 | import tlogger 18 | import threading 19 | from queue import Queue 20 | from command import command_input 21 | import command 22 | import collections 23 | 24 | 25 | 26 | q=Queue() 27 | 28 | ##helper functions 29 | def convert(t): 30 | d="" 31 | for c in t: # replace all chars outside BMP with a ! 32 | d =d+(c if ord(c) < 0x10000 else '!') 33 | return(d) 34 | ### 35 | def getheader(file_name): 36 | headers={} 37 | fp=open(file_name,'r') 38 | for line in fp: 39 | line=line.strip() 40 | #print("line =",line) 41 | data=line.split(",") 42 | x=data.pop(0) 43 | #print(data) 44 | headers[x]=data 45 | 46 | return headers 47 | ############## 48 | class MQTTClient(mqtt.Client):#extend the paho client class 49 | run_flag=False #global flag used in multi loop 50 | def __init__(self,cname,**kwargs): 51 | super(MQTTClient, self).__init__(cname,**kwargs) 52 | self.topic_ack=[] #used to track subscribed topics 53 | self.subscribe_flag=False 54 | self.bad_connection_flag=False 55 | self.bad_count=0 56 | self.count=0 57 | self.connected_flag=False 58 | self.connect_flag=False #used in multi loop 59 | self.sub_topic="" 60 | self.sub_topics=[] #multiple topics 61 | self.sub_qos=0 62 | self.broker="" 63 | self.port=1883 64 | self.keepalive=60 65 | self.cname="" 66 | self.delay=10 #retry interval 67 | 68 | def Initialise_clients(cname,mqttclient_log=False,cleansession=True,flags=""): 69 | #flags set 70 | 71 | logging.info("initialising clients") 72 | client= MQTTClient(cname,clean_session=cleansession) 73 | client.cname=cname 74 | client.on_connect= on_connect #attach function to callback 75 | client.on_message=on_message #attach function to callback 76 | if mqttclient_log: 77 | client.on_log=on_log 78 | return client 79 | 80 | def on_connect(client, userdata, flags, rc): 81 | """ 82 | set the bad connection flag for rc >0, Sets onnected_flag if connected ok 83 | also subscribes to topics 84 | """ 85 | logging.debug("Connected flags"+str(flags)+"result code "\ 86 | +str(rc)+"client1_id") 87 | 88 | if rc==0: 89 | client.connected_flag=True #old clients use this 90 | client.bad_connection_flag=False 91 | if client.sub_topic!="": #single topic 92 | logging.info("subscribing "+str(client.sub_topic)) 93 | topic=client.sub_topic 94 | if client.sub_qos!=0: 95 | qos=client.sub_qos 96 | client.subscribe(topic,qos) 97 | elif client.sub_topics!="": 98 | 99 | client.subscribe(client.sub_topics) 100 | print("Connected and subscribed to ",client.sub_topics) 101 | 102 | else: 103 | client.bad_connection_flag=True # 104 | client.bad_count +=1 105 | client.connected_flag=False # 106 | def on_message(client,userdata, msg): 107 | topic=msg.topic 108 | m_decode=str(msg.payload.decode("utf-8","ignore")) 109 | message_handler(client,m_decode,topic) 110 | #print("message received ",topic) 111 | 112 | def message_handler(client,msg,topic): 113 | data=collections.OrderedDict() 114 | tnow=time.time() 115 | try: 116 | msg=json.loads(msg)#convert to Javascript before saving 117 | json_flag=True 118 | except: 119 | json_flag=False 120 | #print("not already json") 121 | 122 | s=time.localtime(tnow) 123 | 124 | year=str(s[0]) 125 | month=s[1] 126 | if month <10: 127 | month="0"+str(month) 128 | day =s[2] 129 | if day<10: 130 | day="0"+str(day) 131 | hours=s[3] 132 | if hours<10: 133 | hours="0"+str(hours) 134 | m=s[4] 135 | if m<10: 136 | m="0"+str(m) 137 | sec=s[5] 138 | if sec<10: 139 | sec="0"+str(sec) 140 | 141 | ltime =str(year) + "-" + str(month) + "-" + str(day) + "_" + str(hours) 142 | ltime=ltime + ":" + str(m) + ":" + str(sec) 143 | #print("time ",ltime) 144 | data["time_ms"]=int(tnow*1000) 145 | data["time"]=ltime 146 | data["topic"]=topic 147 | if json_flag and csv_flag: 148 | keys=msg.keys() 149 | for key in keys: 150 | data[key]=msg[key] 151 | else: 152 | data["message"]=msg 153 | 154 | 155 | if command.options["storechangesonly"]: 156 | if has_changed(client,topic,msg): 157 | client.q.put(data) #put messages on queue 158 | else: 159 | client.q.put(data) #put messages on queue 160 | 161 | def has_changed_test(client,topic,msg): 162 | #used when testing the data log tester 163 | if topic in client.last_message: 164 | if client.last_message[topic]["status"]==msg["status"]: 165 | return False 166 | client.last_message[topic]=msg 167 | return True 168 | 169 | def has_changed(client,topic,msg): 170 | #print("has changed ",options["testmode"]) 171 | if topic in client.last_message: 172 | if client.last_message[topic]==msg: 173 | return False 174 | client.last_message[topic]=msg 175 | return True 176 | ### 177 | def log_worker(): 178 | """runs in own thread to log data from queue""" 179 | while Log_worker_flag: 180 | #print("worker running ",csv_flag) 181 | time.sleep(0.01) 182 | #time.sleep(2) 183 | while not q.empty(): 184 | results = q.get() 185 | if results is None: 186 | continue 187 | if csv_flag: 188 | log.log_data(results) 189 | #print("message saved csv") 190 | else: 191 | log.log_json(results) 192 | #print("message saved json") 193 | log.close_file() 194 | # MAIN PROGRAM 195 | options=command.options 196 | 197 | if __name__ == "__main__" and len(sys.argv)>=2: 198 | options=command_input(options) 199 | else: 200 | print("Need broker name and topics to continue.. exiting") 201 | raise SystemExit(1) 202 | 203 | #verbose=options["verbose"] 204 | 205 | if not options["cname"]: 206 | r=random.randrange(1,10000) 207 | cname="logger-"+str(r) 208 | else: 209 | cname="logger-"+str(options["cname"]) 210 | Levels=["DEBUG","INFO","WARNING","ERROR","CRITICAL"] 211 | 212 | print("logging level ",options["loglevel"]) 213 | logging.basicConfig(level=options["loglevel"]) 214 | logging.basicConfig(level="INFO") 215 | log_dir=options["log_dir"] 216 | 217 | 218 | logging.info("creating client"+cname) 219 | 220 | client=Initialise_clients(cname,mqttclient_log,False)#create and initialise client object 221 | if options["username"] !="": 222 | client.username_pw_set(options["username"], options["password"]) 223 | 224 | client.sub_topics=options["topics"] 225 | client.broker=options["broker"] 226 | client.port=options["port"] 227 | 228 | 229 | if options["JSON"]: # 230 | csv_flag=False 231 | if options["csv"]: 232 | csv_flag=True 233 | options["JSON"]=False 234 | print("Logging csv format") 235 | if options["JSON"]: 236 | print("Logging JSON format") 237 | if options["storechangesonly"]: 238 | print("starting storing only changed data") 239 | else: 240 | print("starting storing all data") 241 | 242 | ## 243 | log=tlogger.T_logger(log_dir,options["log_records"],csv_flag) 244 | print("Log Directory =",log_dir) 245 | if options["header_flag"]: # 246 | 247 | file_name=options["fname"] 248 | headers={} 249 | headers=getheader(file_name) 250 | log.set_headers(headers) 251 | print("getting headers from ",file_name) 252 | #print(headers) 253 | Log_worker_flag=True 254 | t = threading.Thread(target=log_worker) #start logger 255 | t.start() #start logging thread 256 | ### 257 | 258 | client.last_message=dict() 259 | client.q=q #make queue available as part of client 260 | 261 | 262 | 263 | try: 264 | res=client.connect(client.broker,client.port) #connect to broker 265 | print("connecting to broker",client.broker) 266 | client.loop_start() #start loop 267 | 268 | except: 269 | logging.debug("connection failed") 270 | print("connection failed") 271 | client.bad_count +=1 272 | client.bad_connection_flag=True #old clients use this 273 | #loop and wait until interrupted 274 | try: 275 | while True: 276 | time.sleep(1) 277 | pass 278 | 279 | except KeyboardInterrupt: 280 | print("interrrupted by keyboard") 281 | 282 | client.loop_stop() #start loop 283 | Log_worker_flag=False #stop logging thread 284 | time.sleep(5) 285 | 286 | --------------------------------------------------------------------------------