├── .gitignore ├── @DataReporter ├── DataReporter.m ├── ExportDatasource.m ├── RefreshDatastore.m ├── checkDetails.m └── private │ ├── getDatasourceDrive.m │ ├── getDetails.m │ └── updateDriveInfo.m ├── @Datamaster ├── CheckLogFileStatus.m ├── Datamaster.m ├── addDatasource.m ├── addEntry.m ├── allLogged.m ├── colormap.m ├── getConfigSetting.m ├── getDatasource.m ├── getEntry.m ├── getIndex.m └── private │ ├── connectSQLite.m │ ├── getGitInfo.m │ ├── getKeyValue.m │ ├── reportGitInfo.m │ ├── validateDatetime.m │ └── validateHash.m ├── @datasource ├── CDF2.m ├── Histogram.m ├── Histogram2.m ├── Sync.m ├── TimePlot.m ├── Transient.m ├── datasource.m ├── driveTime.m ├── getChannel.m ├── getDetail.m ├── loadChannel.m ├── mapReduce.m └── openInMoTeC.m ├── @sqlite └── sqlite.m ├── DataHash.m ├── DatamasterSetup.mlapp ├── LICENSE ├── Legal ├── DataHash_license.txt ├── NumPy_license.txt ├── SQLite_license.txt ├── pint_license.txt └── textprogressbar_license.txt ├── Python └── ConnectGoogleDrive.py ├── README.md ├── SQLQueries ├── Channels.sql ├── DetailLog.sql ├── DetailName.sql └── MasterDirectory.sql ├── TestSuite ├── CheckBuild.m ├── testDataReporter.m ├── testDatamaster.m └── testGetDetail.m ├── UnitDefine.txt ├── convertUnit.m ├── default.ini └── textprogressbar.m /.gitignore: -------------------------------------------------------------------------------- 1 | Datastore/ 2 | *.asv 3 | *.mat 4 | *.sqlite 5 | 6 | #Ignore folders 7 | html/** 8 | **/__pycache__/ 9 | Dependencies/** 10 | 11 | #Ignore Config Files 12 | *.ini 13 | !default.ini 14 | 15 | #Ignore zip folders 16 | *.zip 17 | 18 | #Ignore MoTeC Log Files 19 | *.ld 20 | *.ldx 21 | 22 | *.sublime-* -------------------------------------------------------------------------------- /@DataReporter/DataReporter.m: -------------------------------------------------------------------------------- 1 | classdef DataReporter 2 | %Class to Handle Updating the Datastore and generating reports 3 | 4 | properties 5 | %Get Access 
function [success,FinalHash] = ExportDatasource(dm,i2,MoTeCFile)
%Interface with the MoTeC i2Pro Application to control the exporting of a
%log file as a .mat file
%   dm:        Datamaster handle used to register the exported datasource
%   i2:        COM handle to a running MoTeC.i2Application server
%   MoTeCFile: Struct w/ the following fields
%       ld: The file id of the .ld file to download
%       ldx: The file id of the .ldx file to download
%       OriginHash: MD5 Hash of the .ld and .ldx files
%
%   success:   true once the datasource has been added to the Datastore
%   FinalHash: Hash of the exported .mat file ('' on failure)

%Set Success Flag
success = false;
FinalHash = '';

%Paths are assigned as each file is created; initialize them here so the
%cleanup code in the catch block can safely test them even when the
%failure happens before the download completes
saveFile = ''; ldPath = ''; ldxPath = '';

fprintf('Exporting: %s',MoTeCFile.OriginHash);
startExport = tic;
try
    %Create a temporary mat file that will be overwritten by i2Pro
    saveFile = [tempname '.mat'];
    save(saveFile, 'saveFile');

    %Download the .ld and .ldx file from Google Drive
    [ldPath, ldxPath] = getDatasourceDrive(MoTeCFile);

    %% Export Datasource as .mat
    %Export to the Temp File then copy to a new file
    tic
    fprintf('\tExporting from MoTeC...')
    i2.DataSources.Open(ldPath);
    i2.DataSources.ExportMainAsMAT(saveFile);
    i2.DataSources.CloseAll;

    %Extract the Details
    Details = getDetails(ldxPath);
    fprintf('done in %3.2f s\n',toc);

    %Request to add Datasource to the Datastore
    tic
    fprintf('\tAdding to database...')
    FinalHash = dm.addDatasource(MoTeCFile,saveFile,Details);
    fprintf('done in %3.2f s\n',toc);

    %Clean up
    delete(ldPath); delete(ldxPath); %Remove MoTeC Files
    success = true;
    fprintf('\tdone in %3.2f s.\n',toc(startExport))
catch e
    %Remove any temporary files that were created before the failure.
    %NOTE: the previous cleanup used exist(saveFile,'var'), which checks
    %for a WORKSPACE VARIABLE named by the path string and is always
    %false, so the temp files were never removed; check the disk instead.
    for tempFile = {saveFile, ldPath, ldxPath}
        if ~isempty(tempFile{:}) && exist(tempFile{:},'file')
            delete(tempFile{:});
        end
    end

    switch e.identifier
        case 'MATLAB:COM:E0'
            %MoTeC Export Failed
            fprintf('Export Failed\n')
        otherwise
            %Something Bad Happened
            rethrow(e);
    end
end
end
7 | %Returns a cell array containing the OriginHashes of all newly exported 8 | %Log Files 9 | 10 | %Create Stats to track progress 11 | stats = struct('New',0,'Duplicate',0,'Modified',0,'Prior',0,'Corupt',0,'NewHash',''); 12 | 13 | %Assert that the host computer is a pc 14 | assert(ispc,'Only a pc can export MoTeC Log files') 15 | 16 | %Change the working directory to the datamaster folder 17 | cd(fileparts(which('reportGitInfo'))); 18 | 19 | %Get all MoTeC Log files stored on google drive 20 | MoTeCFile = updateDriveInfo(); 21 | 22 | %Create MoTeC COM Server 23 | i2 = actxserver('MoTeC.i2Application'); 24 | pause(1); %Wait for MoTeC to Open 25 | 26 | %Pull Datamaster handle from DataReporter 27 | dm = dr.dm; 28 | 29 | %Set Up Console 30 | startTime = tic; 31 | fprintf('\nSearching for New MoTeC Log Files...\n'); 32 | 33 | %Loop over all log files 34 | for i = 1:length(MoTeCFile) 35 | %Check if file as already been exported 36 | switch dm.CheckLogFileStatus(MoTeCFile(i)) 37 | case 'new' 38 | %New Data source -> Export 39 | [success, FinalHash] = DataReporter.ExportDatasource(dm, i2, MoTeCFile(i)); 40 | 41 | %Check if export was successful 42 | if success 43 | stats.New = stats.New +1; 44 | stats.NewHash{end+1} = FinalHash; 45 | else 46 | stats.Corupt = stats.Corupt +1; 47 | end 48 | case 'corrupt' 49 | stats.Corupt = stats.Corupt +1; 50 | fprintf('Corrupt: %s...Skipping\n',MoTeCFile(i).OriginHash); 51 | case 'modified' 52 | stats.Modified = stats.Modified +1; 53 | fprintf('Modified: %s...Skipping\n',MoTeCFile(i).OriginHash); 54 | case 'duplicate' 55 | stats.Duplicate = stats.Duplicate +1; 56 | fprintf('Duplicate: %s...Skipping\n',MoTeCFile(i).OriginHash); 57 | case 'exported' 58 | stats.Prior = stats.Prior +1; 59 | fprintf('Prior Export: %s...Skipping\n',MoTeCFile(i).OriginHash); 60 | end 61 | 62 | end 63 | 64 | %Close i2 Pro 65 | i2.Exit; 66 | 67 | %Report Duration 68 | fprintf('\n\nDatabase Refresh Complete\n'); 69 | 70 | %Report Export Stats 71 | vars = 
fieldnames(stats); 72 | for i = 1:length(vars) 73 | switch class(stats.(vars{i})) 74 | case 'double' 75 | fprintf('%s Log Files: %.1d\n',vars{i},stats.(vars{i})); 76 | case 'cell' 77 | fprintf('Newly Exported Log Files:\n') 78 | 79 | temp = stats.(vars{i}); 80 | fprintf('\t%s\n',temp{:}); 81 | end 82 | end 83 | fprintf('\n') 84 | toc(startTime); 85 | 86 | %Get Git Version and report 87 | reportGitInfo; 88 | 89 | -------------------------------------------------------------------------------- /@DataReporter/checkDetails.m: -------------------------------------------------------------------------------- 1 | function checkDetails(dr,hash) 2 | %Checks that the datasources (Given by hash) followed Standard Data 3 | %Logging Practices 4 | 5 | %Get Datasources 6 | ds = dr.dm.getDatasource(hash); 7 | Error = zeros(1,length(ds)); 8 | 9 | %Check Each Datasource 10 | for i = 1:length(ds) 11 | %%Report Current Log File 12 | fprintf('Logged Date: %s\n',ds(1).getEntry.Datetime) 13 | fprintf('Origin Hash: %s\n',ds(i).getEntry.OriginHash); 14 | 15 | %% Validate Event 16 | Error(i) = Error(i) + SelectionField(ds(i),'Event',... 17 | {'ACCEL', 'AUTOX', 'ENDUR', 'SKID', 'HOLD', 'SWEEP', 'WARM', 'OTHER'}); 18 | 19 | %% Validate Venue 20 | Error(i) = Error(i) + SelectionField(ds(i),'Venue',... 
21 | {'BLOT', 'DYNO', 'LAB', 'MIS', 'GLEN', 'DESTINY', 'GROTON'}); 22 | 23 | %% Validate Engine ID -> Tune File name 24 | %TODO: Find Tune Files 25 | fprintf('\t%s: ''%s''\n','Engine ID',ds(i).getDetail('EngineID')); 26 | 27 | %% Validate Vehicle ID -> Vehicle Configuration Spreadsheet Filename 28 | %TODO: Find/Set up Spreadsheet 29 | fprintf('\t%s: ''%s''\n','Vehicle ID',ds(i).getDetail('VehicleID')); 30 | 31 | %% Validate Driver -> Must be a NetID 32 | Error(i) = Error(i) + RegExpTester(ds(i),'Driver','^(([a-z]{2,3}\d+)\s*)+$'); 33 | 34 | %% Validate Session -> Subteam ID + netid 35 | SubteamID = {'DRIVE', 'DYNO', 'FLOW', 'AERO', 'ERGO', 'UNSPRUNG', 'DTRAIN' }; %Cell Array of Allowed Subteam ID's 36 | 37 | %Check if valid entry 38 | Error(i) = Error(i) + RegExpTester(ds(i),'Session',... 39 | ['^(' strjoin(SubteamID,'|') ')[^a-z0-9]([a-z]{2,3}\d+)$']); 40 | 41 | %% Validate Short Comment 42 | Sys_Id = {'ENG', 'CHA', 'EE'}; 43 | FaultCode = {'ISU', 'FAIL', 'CAT'}; 44 | 45 | Error(i) = Error(i) + RegExpTester(ds(i),'Short',... 
function [ldLoc, ldxLoc] = getDatasourceDrive(MoTeCFile,savePath) %#ok<INUSD>
% Polls Google Drive to download the requested MoTeC Log files
% MoTeCFile: Struct w/ the following fields
%   ldLink:  webContentLink used to download the .ld file
%   ldxLink: webContentLink used to download the .ldx file
%
% ldLoc: filepath where the .ld file is saved
% ldxLoc: filepath where the .ldx file is saved

%Report Start of Download
fprintf('\n\tDownloading from Google Drive...');
sTime = tic;

%Download the .ld and .ldx files to temporary files
ldLoc = retryDownload( [tempname '.ld'], MoTeCFile.ldLink);
ldxLoc = retryDownload([tempname, '.ldx'], MoTeCFile.ldxLink);

%Report Done
fprintf('done in %3.2f s\n',toc(sTime));tic
end

function saveLoc = retryDownload(saveLoc, url)
%Download the given Google Drive file using its webContentLink, retrying
%transient web-service failures with exponential back off.
% saveLoc: path to save the downloaded file
% url: Link used to download the file

%Setting for back off
maxTries = 5; %Max Attempts to make
pauseTime = 0.5; %Baseline time to wait

%Error identifiers that indicate a transient failure worth retrying
retryableErrors = {'MATLAB:webservices:Timeout',...
    'MATLAB:webservices:CopyContentToDataStreamError',...
    'MATLAB:webservices:HTTP404StatusCodeError'};

for nTry = 0:maxTries-1
    try
        %Attempt to download the file
        saveLoc = websave(saveLoc, url);
        return
    catch e
        %If error is not a known transient failure -> rethrow at once
        if ~any(strcmp(e.identifier,retryableErrors))
            rethrow(e)
        end

        %Out of attempts: surface the last transient error rather than
        %silently returning a path to a file that was never downloaded
        %(the previous implementation fell out of the loop and returned
        %the stale temp path on exhaustion)
        if nTry == maxTries-1
            rethrow(e)
        end

        %Back off exponentially before the next attempt
        fprintf('timeout...');
        pause(pauseTime*2^nTry);
    end
end
end
([\s\S]+)<\/Details>','tokens'); 13 | 14 | %Check if Details were logged 15 | if ~isempty(str) 16 | %Extract Details block for str cell array 17 | str = str{:}{:}; 18 | 19 | %Extract each field 20 | fields = regexpi(str,'<(.+?)\/>','tokens'); 21 | 22 | %Create the Details Struct 23 | for i = 1:length(fields) 24 | %Grab the current Field sub cell 25 | current = regexpi(fields{i}, '(String|Numeric|DateTime) Id="(.+?)" Value="(.*?)"( Unit="(.*?)")?', 'tokens'); 26 | %Skip to next field if current is empty 27 | if isempty(current{:}) 28 | continue 29 | end 30 | 31 | current = current{:}{:}; 32 | 33 | %Match Unit 34 | unit = regexpi(current{4},'"(.*)"','tokens'); 35 | if ~isempty(unit) 36 | current{4} = unit{:}{:}; 37 | end 38 | 39 | %Create a valid struct name 40 | current{2} = matlab.lang.makeValidName(current{2}); 41 | if ~strcmp(current{1},'Numeric') || length(current)~=4 42 | Details.(current{2}) = current{3}; 43 | else 44 | Details.(current{2}).Value = current{3}; 45 | Details.(current{2}).Unit = current{4}; 46 | end 47 | end 48 | end 49 | 50 | end 51 | 52 | -------------------------------------------------------------------------------- /@DataReporter/private/updateDriveInfo.m: -------------------------------------------------------------------------------- 1 | function datasource = updateDriveInfo() 2 | %Import Python Module for downloading Log File metadata 3 | import py.ConnectGoogleDrive.* 4 | 5 | % Poll Google Drive for missing files 6 | fprintf('Polling Google Drive for new files...') 7 | client_id = Datamaster.getConfigSetting('client_id'); 8 | client_secret = Datamaster.getConfigSetting('client_secret'); 9 | files = cell(py.ConnectGoogleDrive.getFileList(client_id, client_secret)); 10 | fprintf('done\n') 11 | 12 | %Convert from py.dict to struct 13 | files = cellfun(@struct,files,'UniformOutput',false); 14 | 15 | %Convert py.str to char 16 | files = cellfun(@(x) structfun(@char,x,'UniformOutput',false),... 
17 | files,'UniformOutput',false); 18 | 19 | %Remove items without all fields present 20 | fields = {'id','name','md5Checksum'}; 21 | index = cellfun(@(x) all(isfield(x,fields)),files); %Find cells missing parameters 22 | files = [files{index}]; %Only keep the ones with everything 23 | 24 | %Enumerate File type 25 | fileType = zeros(1,length(files)); 26 | ft.ld = 1; ft.ldx = 2; 27 | 28 | %Loop over files 29 | for i = 1:length(files) 30 | %% Identify File Type 31 | if strcmp(files(i).name(end-2:end),'.ld') 32 | fileType(i) = ft.ld; 33 | elseif strcmp(files(i).name(end-3:end),'.ldx') 34 | fileType(i) = ft.ldx; 35 | end 36 | 37 | %Drop extension from name 38 | [~,name] = fileparts(files(i).name); 39 | files(i).name = name; 40 | end 41 | 42 | %Drop files that are not log files 43 | files(~fileType) = []; fileType(~fileType) =[]; 44 | 45 | %Sort by createdTime: Oldest First 46 | [~,index] = sortrows({files.modifiedTime}'); 47 | files = files(index); fileType = fileType(index); 48 | 49 | %Find all unique .ld files pick the oldest one as real 50 | ldIndex = find(fileType==ft.ld); 51 | [~,ldIndexUnique] = unique({files(ldIndex).md5Checksum},'first'); 52 | ldIndex = ldIndex(ldIndexUnique); 53 | 54 | %Match each .ld file to its .ldx file 55 | ldxIndex = find(fileType==ft.ldx); 56 | [~,ldxTemp,ldTemp] = intersect({files(ldxIndex).name},{files(ldIndex).name}); 57 | ldIndex = ldIndex(ldTemp); ldxIndex = ldxIndex(ldxTemp); 58 | 59 | %Consolidate datasources into a paired array 60 | datasource = struct('OriginHash',{},'ld',{},'ldx',{},'name',{},... 61 | 'ldLink', {}, 'ldxLink', {}); 62 | 63 | %Start at i = length(ldIndex) to set datasource length 64 | for i = length(ldIndex):-1:1 65 | % Populate Fields in datasource 66 | datasource(i).OriginHash = ... 
function status = CheckLogFileStatus(dm,item)
%Classify a Google Drive log file against the master directory.
% item is structure with the following fields
%   OriginHash: The concatenated MD5 hashes of the .ld and .ldx [ldHash ldxHash]
%   ld: The id of the .ld file on Google Drive
%   ldx: The ldx of the .ldx file on Google Drive
%
% status is one of:
%   'new'       - neither the hash nor the file ids are on record
%   'modified'  - same file ids, but the content hash has changed
%   'duplicate' - same content hash under different file ids
%   'exported'  - hash and file ids both match a prior export

%Look for a prior record with the same content hash
hashRows = dm.mDir.fetch(sprintf(...
    'select id from masterDirectory where OriginHash=''%s''',item.OriginHash));
hashKnown = ~isempty(hashRows);

%Look for a prior record that used the same Google Drive file ids
idRows = dm.mDir.fetch(sprintf(...
    'select id from masterDirectory where ldId=''%s'' OR ldxId=''%s''',item.ld,item.ldx));
idKnown = ~isempty(idRows);

%% Determine Status of Log File
if hashKnown && idKnown
    %Log file has already been exported
    status = 'exported';
elseif hashKnown
    %Duplicate Log File: same content uploaded under new file ids
    status = 'duplicate';
elseif idKnown
    %Log File has been modified: known ids but different content
    status = 'modified';
else
    %New Log File
    status = 'new';
end
end
-------------------------------------------------------------------------------- 1 | classdef Datamaster < handle 2 | % Datamaster handles exporting i2 Pro Log files from MoTeC i2 Pro, 3 | % curating the exported datasources, enforcing good data practices and 4 | % providing access to the datasource en mass 5 | 6 | properties (Access = private) 7 | mDir = []; %Connection Handle to the SQL Database 8 | Datastore = nan %Location of the Datastore 9 | 10 | Details = []; %Stores a list of Details and their keys 11 | Channels = []; %Stores a list of Channels and their keys 12 | 13 | %Hashing Settings 14 | HashOptions = struct('Method','SHA-256',... 15 | 'Format','hex',... 16 | 'Input','file'); 17 | end 18 | 19 | methods 20 | %% Class Constructor 21 | function dm = Datamaster() 22 | %Report Current Version info 23 | reportGitInfo; 24 | 25 | % Set Datastore Path 26 | dm.Datastore = Datamaster.getConfigSetting('datastore_path'); 27 | 28 | % Load Database 29 | mDirPath = Datamaster.getConfigSetting('master_directory_path'); 30 | dm.mDir = connectSQLite(mDirPath); 31 | 32 | % Get a list of Details and Channels that have been logged 33 | dm.updateDetails; dm.updateChannels; 34 | 35 | %% Check for Updates - But Only once per session 36 | persistent flagChecked 37 | if isempty(flagChecked) 38 | try 39 | %Move to the Datamaster folder 40 | savedCd = cd; cd(Datamaster.getPath); 41 | 42 | %Fetch updates from remote 43 | [success, ~] = system('git fetch'); 44 | assert(success==0, 'Error fetching updates from remote'); 45 | 46 | %Check Current Status 47 | [success, str] = system('git status'); 48 | assert(success==0, 'Error reading git repo status'); 49 | 50 | %Get status 51 | status = regexpi(str, 'Your branch is behind ''(.+)'' by (\d+)', 'tokens'); 52 | 53 | %Report to user 54 | if ~isempty(status) 55 | warning(['Updates are avaliable. ',... 
56 | 'Run Datamaster Setup to Update']); 57 | end 58 | 59 | %Return to original directory 60 | cd(savedCd); flagChecked = true; 61 | catch 62 | %Warn user that we were unable to check for updates but 63 | %don't recheck this session 64 | warning('Unable to check for updates'); 65 | flagChecked = true; 66 | end 67 | end 68 | end 69 | 70 | %% Small Public Methods -> Move externally if it grows 71 | function DatastorePath = getDatastore(obj) 72 | %Returns the Full Path to the Datastore 73 | DatastorePath = obj.Datastore; 74 | 75 | end 76 | 77 | function num = numEnteries(dm) 78 | %Returns the number of Datasources stored in the Datastore 79 | num = size(dm.mDir,2); 80 | 81 | warning('numEnteries will be removed in a future release') 82 | end 83 | 84 | function dm = updateDetails(dm) 85 | %Updates the list of details that have been logged in the 86 | %database 87 | dm.Details = dm.mDir.fetch('SELECT DetailName.fieldName, DetailName.id FROM DetailName'); 88 | 89 | end 90 | 91 | function dm = updateChannels(dm) 92 | %Updates the list of channels that have been logged in the 93 | %database 94 | dm.Channels = dm.mDir.fetch('SELECT ChannelName.channelname, ChannelName.id FROM ChannelName'); 95 | 96 | end 97 | 98 | end 99 | 100 | %% Function Signatures for Public Methods 101 | methods (Access = public) 102 | status = CheckLogFileStatus(obj,LogFileLoc) 103 | 104 | FinalHash = addDatasource(dm,MoTeCFile,saveFile,Details) 105 | 106 | addEntry(dm,MoTeCFile,FinalHash,Details,channels) 107 | 108 | Datasource = getDatasource(dm,varargin) 109 | 110 | [Entry] = getEntry(dm,varargin) 111 | 112 | [index] = getIndex(dm, varargin) 113 | 114 | removeEntry(obj,varagin) 115 | 116 | Cleanup(obj) 117 | 118 | SaveDirectory(obj) 119 | 120 | checkAllDatasourcesExist(dm) 121 | 122 | LoggedParameters = allLogged(dm,varargin) 123 | end 124 | 125 | methods (Access = public, Static = true) 126 | 127 | function DatamasterPath = getPath 128 | %Function that returns the path to the Datamaster folder 129 
function FinalHash = addDatasource(dm,MoTeCFile,saveFile,Details)
%Append details/channels to an exported .mat file, move it into the
%Datastore as a read-only file named by its hash, and record it in the
%master directory.
%   MoTeCFile: Struct describing the source .ld/.ldx files on Google Drive
%   saveFile:  Path to the temporary .mat file exported by i2 Pro
%   Details:   Struct of details extracted from the .ldx file
%
%   FinalHash: Hash of the finished .mat file; used as its Datastore name

%Only add Datasource if new
if strcmp(dm.CheckLogFileStatus(MoTeCFile),'new')
    %% Append Parameters and Details
    %Load temporary mat file into workspace
    ds = load(saveFile);

    %Convert Double Precision values to single precision. MoTeC logs
    %data using ~12-14 bits per sample, thus a 32 bit float
    %(single-precision float) is more than sufficient to store the data.
    %By not storing samples as a double (64 bits), the final file size is
    %reduced by ~33%, with marginal errors due to truncation (On the
    %order of 1e-6)
    channels = fieldnames(ds);
    for i = 1:length(channels)
        %Check each subfield is a channel
        if isstruct(ds.(channels{i})) && isfield(ds.(channels{i}),'Time') && ...
                isfield(ds.(channels{i}),'Value') && isfield(ds.(channels{i}),'Units')
            %Loop over numeric fields for each channel
            for fields = {'Time', 'Value'}
                field = fields{:}; %Get string from cell array

                %Only reduce if data type is double
                if isa(ds.(channels{i}).(field),'double')
                    %Convert Double to Single Precision float
                    ds.(channels{i}).(field) = single(ds.(channels{i}).(field));
                end
            end
        end
    end

    %Add Logged Parameters to temporary mat file
    ds.Channels = channels;

    %Add Details to temporary mat file
    ds.Details = Details;

    %Re-save temporary mat file using v7
    save(saveFile,'-struct','ds','-v7');

    %Compute Hash of Datasource
    FinalHash = DataHash(saveFile,dm.HashOptions);

    %% Move to Datastore
    %Retry with exponential back off: the file may be locked briefly
    %right after it is written
    maxTries = 10; nTry = 0;
    while nTry < maxTries
        try
            %Move to Datastore
            saveLoc = fullfile(dm.Datastore,[FinalHash '.mat']);
            movefile(saveFile,saveLoc,'f');

            %Ensure file is read only
            fileattrib(saveLoc,'-w');

            %Mark as successful
            nTry = maxTries;
        catch e
            %Only retry the known transient write error. Any other error
            %previously fell through the catch without incrementing nTry
            %and spun this loop forever; rethrow it instead.
            if ~strcmp(e.identifier,'MATLAB:save:unableToWriteToMatFile')
                rethrow(e)
            end

            pause(0.5*2^nTry);
            nTry = nTry +1;
            if nTry == maxTries
                rethrow(e)
            end
        end
    end

    %Add to Master Directory
    dm.addEntry(MoTeCFile,FinalHash,Details,channels)
else
    error('Can Only Add New Datasources to the Datastore')
end
end
Details.LogDate]; 8 | logDatetime = datetime(logDatetime, 'InputFormat', 'HH:mm:ss dd/MM/yyyy'); 9 | logDatetime = datevec(logDatetime); 10 | 11 | MoTeCFile.createdTime = sprintf('%04d-%02d-%02dT%02d:%02d:%02.3fZ',logDatetime); 12 | end 13 | end 14 | 15 | 16 | %Create cell array of column names and values 17 | colNames = {'ldId', 'ldxId', 'OriginHash', 'FinalHash', 'Datetime'}; 18 | values = {MoTeCFile.ld, MoTeCFile.ldx, MoTeCFile.OriginHash, FinalHash, MoTeCFile.createdTime}; 19 | 20 | %Insert into the datastore 21 | fastinsert(dm.mDir, 'masterDirectory', colNames, values) 22 | 23 | %Get id for datasource 24 | query = sprintf('select id from MasterDirectory where FinalHash=''%s''',FinalHash); 25 | datasourceId = dm.mDir.fetch(query); datasourceId = datasourceId{:}; 26 | 27 | %% Check for Missing Fields 28 | %Add Missing Names to Details Logged 29 | DetailName = dm.mDir.fetch('select fieldName from DetailName'); 30 | fieldName = fieldnames(Details); fieldName = fieldName(:); 31 | [~,indexMissing] = setxor(fieldName, DetailName); 32 | 33 | %Check if no Detail Names are missing 34 | if ~isempty(indexMissing) 35 | %Add missing Detail names to the DetailName table 36 | for i = 1:length(indexMissing) 37 | fastinsert(dm.mDir, 'DetailName', 'fieldName', fieldName(indexMissing(i))) 38 | end 39 | end 40 | 41 | %Add missing Channels 42 | ChannelName = dm.mDir.fetch('select id, channelName from ChannelName'); 43 | if ~isempty(ChannelName) 44 | [~,indexMissing] = setxor(channels,ChannelName(:,2)); 45 | else 46 | indexMissing = true(length(channels),1); 47 | end 48 | 49 | %Check if no channels are missing 50 | if ~isempty(indexMissing) 51 | %Add Channels to ChannelName Table -> Ensure channel is a column array 52 | for i = 1:length(indexMissing) 53 | dm.mDir.execute('INSERT INTO ChannelName (channelName) VALUES (?)', channels(indexMissing(i))); 54 | end 55 | end 56 | 57 | %Commit Changes 58 | dm.mDir.conn.commit 59 | 60 | %% Add Entries 61 | 62 | %Record Details 63 | for i 
=1:length(fieldName) 64 | if isstruct(Details.(fieldName{i})) 65 | %Check that Detail was actually logged 66 | if ~isempty(Details.(fieldName{i}).Value) 67 | %Add Entry to datastore 68 | dm.mDir.execute(['INSERT INTO DetailLog (entryId, value, unit, fieldId) ',... 69 | 'VALUES (?, ?, ?, (SELECT id FROM DetailName WHERE fieldName = ?))'],... 70 | datasourceId,... 71 | Details.(fieldName{i}).Value, Details.(fieldName{i}).Unit,... 72 | fieldName{i}); 73 | end 74 | else 75 | %Check that Details was actually logged 76 | if ~isempty(Details.(fieldName{i})) 77 | %Add Entry to datastore 78 | dm.mDir.execute(['INSERT INTO DetailLog (entryId, value, fieldId) ',... 79 | 'VALUES (?, ?, (SELECT id FROM DetailName WHERE fieldName = ?))'],... 80 | datasourceId,... 81 | Details.(fieldName{i}),... 82 | fieldName{i}); 83 | end 84 | 85 | end 86 | end 87 | 88 | %Record Channels Logged 89 | for i = 1:length(channels) 90 | query = sprintf(['INSERT INTO ChannelLog (entryId, channelId) ',... 91 | 'VALUES (%d, (SELECT id FROM ChannelName WHERE channelName = ''%s''));'],... 92 | datasourceId,channels{i}); 93 | dm.mDir.execute(query) 94 | end 95 | 96 | %Commit Changes 97 | dm.mDir.conn.commit 98 | catch e 99 | %Rollback Changes 100 | dm.mDir.conn.rollback(); 101 | 102 | %Re throw Error 103 | rethrow(e); 104 | end 105 | end 106 | 107 | function fastinsert(conn, table, fieldnames, values) 108 | %Helper function for adding entries to the datastore 109 | if ischar(values) 110 | values = {values}; 111 | end 112 | if ischar(fieldnames) 113 | fieldnames = {fieldnames}; 114 | end 115 | conn.execute(sprintf('INSERT INTO %s (%s) VALUES (''%s'')',... 
function varargout = allLogged(dm,varargin)
% Returns a list of every parameter logged for the requested
% Datasources. Uses getEntry to provide search
%   ReturnType: 'channel' (default) to list channel names, or 'detail'
%               to list detail field names
%
%   With no output arguments the list is pretty-printed to the console in
%   four columns; otherwise it is returned as a cell array of names

persistent p
if isempty(p)
    p = inputParser;
    p.FunctionName = 'allLogged';
    addRequired(p,'dm',@(x) isa(x,'Datamaster'));
    addOptional(p, 'ReturnType', 'channel', @ischar);
end

%Parse Inputs
parse(p,dm,varargin{:});

if strcmp(p.Results.ReturnType, 'channel')
    %Get a list of all logged channels
    LoggedParameters = dm.mDir.fetch(['SELECT channelName FROM ChannelName'...
        ' ORDER BY channelName']);
elseif strcmp(p.Results.ReturnType, 'detail')
    %Get a list of all logged details
    LoggedParameters = dm.mDir.fetch(['SELECT fieldName FROM DetailName'...
        ' ORDER BY fieldName']);
else
    %Previously an unknown ReturnType left LoggedParameters undefined and
    %caused an obscure error below; fail fast with a clear message
    error('Datamaster:allLogged:UnknownReturnType',...
        'Unknown ReturnType ''%s'': expected ''channel'' or ''detail''',...
        p.Results.ReturnType);
end

%Print Channels logged to console if no output requested
if nargout == 0
    if isempty(LoggedParameters)
        %Nothing logged yet: max() below would return empty and build a
        %malformed format string, so report and return early
        fprintf('No parameters logged\n')
    else
        %Find the length of the longest string
        maxLength = max(cellfun(@length, LoggedParameters));

        %Set Format Spec
        formatStr = ['%-' sprintf('%d', maxLength) 's\t'];
        formatStr = [repmat(formatStr, [1 4]) '\n'];

        %Output String
        fprintf(formatStr, LoggedParameters{:})
        fprintf('\n')
    end

    varargout = {};
else
    %Return a cell array of the logged parameters
    varargout{1} = LoggedParameters;
end
end
4 | 5 | %Load colormap 6 | switch name 7 | %% Gradient Colormaps - To emphasize scale 8 | case 'warm' 9 | cmap = [253, 237, 134;... 10 | 253, 232, 110;... 11 | 249, 208, 98;... 12 | 245, 184, 87;... 13 | 240, 160, 75;... 14 | 235, 138, 64;... 15 | 231, 114, 53;... 16 | 227, 91, 44;... 17 | 199, 78, 41;... 18 | 157, 68, 41;... 19 | 117, 60, 44;... 20 | 76, 52, 48]; 21 | case 'cool' 22 | cmap = [220, 236, 201;... 23 | 179, 221, 204;... 24 | 138, 205, 206;... 25 | 98, 190, 210;... 26 | 70, 170, 206;... 27 | 61, 145, 190;... 28 | 53, 119, 174;... 29 | 45, 94, 158;... 30 | 36, 68, 142;... 31 | 28, 43, 127;... 32 | 22, 32, 101;... 33 | 17, 23, 75]; 34 | case 'night' 35 | cmap = [249, 205, 172;... 36 | 243, 172, 162;... 37 | 238, 139, 151;... 38 | 233, 105, 141;... 39 | 219, 80, 135;... 40 | 184, 66, 140;... 41 | 151, 52, 144;... 42 | 116, 39, 150;... 43 | 94, 31, 136;... 44 | 77, 26, 112;... 45 | 61, 20, 89;... 46 | 45, 15, 65]; 47 | %% Diverging Colormaps - Used to emphasize extremes 48 | case 'forest' 49 | cmap = [140, 81, 10;... 50 | 216, 179, 101;... 51 | 246, 232, 195;... 52 | 199, 234, 229;... 53 | 90, 180, 172;... 54 | 1, 102, 94]; 55 | case 'july' 56 | cmap = [178, 24, 43;... 57 | 239, 138, 98;... 58 | 253, 219, 199;... 59 | 209, 229, 240;... 60 | 103, 169, 207;... 61 | 33, 102, 172]; 62 | %% Qualitative Colormaps - Used when order doesn't matter 63 | case 'party' 64 | cmap = [228, 26, 28;... 65 | 55, 126, 184;... 66 | 77, 175, 74;... 67 | 152, 78, 163;... 68 | 255, 127, 0;... 69 | 255, 255, 51;... 70 | 166, 86, 40;... 71 | 247, 129, 191;... 
72 | 153, 153, 153]; %Semicolon added: the bare ] echoed the whole cmap matrix to the console 73 | otherwise 74 | error('Unknown colormap'); 75 | end 76 | 77 | %Scale to [0, 1] 78 | cmap = cmap ./255; 79 | 80 | %Apply Colormap 81 | colormap(gca, cmap); 82 | end 83 | 84 | -------------------------------------------------------------------------------- /@Datamaster/getConfigSetting.m: -------------------------------------------------------------------------------- 1 | function value = getConfigSetting(Key) 2 | %getConfigSetting returns the configured value for Key for the current user 3 | % Reads the user config (config.ini) first, then falls back to default.ini 4 | 5 | %Expected filename for config file 6 | userConfig = fullfile(Datamaster.getPath, 'config.ini'); 7 | defaultConfig = fullfile(Datamaster.getPath, 'default.ini'); 8 | 9 | %Check for both config files 10 | for file = {userConfig, defaultConfig} 11 | if ~exist(file{:}, 'file') 12 | if strcmp(file{:}, defaultConfig) 13 | %Missing Default config file -> throw error 14 | errorStruct.message = ['Default config file (default.ini) not found. ',... 15 | 'Please Pull an up to date copy from the git repo']; 16 | errorStruct.identifier = 'Datamaster:MissingConfig'; 17 | error(errorStruct); 18 | elseif strcmp(file{:}, userConfig) 19 | %Missing User Config file -> Create empty config file 20 | warning(['User config file (config.ini) not found. ',...
21 | 'Created blank user config file']); 22 | 23 | %Create Blank file 24 | fid = fopen(userConfig, 'w'); fprintf(fid, ' '); fclose(fid); 25 | end 26 | end 27 | end 28 | 29 | userValue = getKeyValue(userConfig, Key); 30 | defaultValue = getKeyValue(defaultConfig, Key); 31 | 32 | %Check if the user has set the key 33 | if ~isempty(userValue) 34 | %Return Key from user settings 35 | value = userValue; 36 | 37 | %Check for a default setting 38 | elseif ~isempty(defaultValue) 39 | %Return key from default settings 40 | value = defaultValue; 41 | 42 | else % Abort and notify User 43 | errorStruct.message = sprintf('Missing Value for %s', Key); %Fixed: previous code referenced 'Section', which is undefined in this function 44 | errorStruct.identifier = 'Datamaster:ConfigSetting:MissingKey'; 45 | error(errorStruct); 46 | end 47 | 48 | %% Post Processing If Needed 49 | if ischar(value) 50 | %Replace %datastore% with datastore_path 51 | if strfind(value, '%datastore%') 52 | value = strrep(value, '%datastore%',... 53 | Datamaster.getConfigSetting('datastore_path')); 54 | %Get Valid File Path 55 | value = fullfile(value); 56 | end 57 | 58 | %Replace %Datamaster% with path to Datamaster 59 | if strfind(value, '%Datamaster%') 60 | value = strrep(value, '%Datamaster%',...
61 | Datamaster.getPath); 62 | %Get Valid File Path 63 | value = fullfile(value); 64 | end 65 | end 66 | 67 | end 68 | 69 | function value = getKeyValue(filename, key) 70 | %Scan config files and return the correct value for the request key 71 | 72 | %Preallocate null value 73 | value = []; 74 | 75 | %Get File id 76 | fid = fopen(filename); 77 | while ~feof(fid) 78 | line = fgetl(fid); 79 | 80 | %Scan for key 81 | if regexpi(line, sprintf('^%s=', key)) 82 | %Extract value 83 | value = regexpi(line, sprintf('^%s=(.+)$', key), 'tokens'); 84 | value = value{:}{:}; 85 | break 86 | end 87 | end 88 | 89 | %Close File 90 | fclose(fid); 91 | end 92 | -------------------------------------------------------------------------------- /@Datamaster/getDatasource.m: -------------------------------------------------------------------------------- 1 | function ds = getDatasource(dm,varargin) 2 | %Function to handle extracting a datasource from Datamaster 3 | 4 | %Get Entries for requested datasource 5 | entry = dm.getEntry(varargin{:}); 6 | 7 | %Access requested datasources 8 | ds = datasource.empty(0,length(entry)); 9 | for i = 1:length(entry) 10 | ds(i) = datasource(dm,entry(i)); 11 | end 12 | 13 | %Force to column 14 | ds = ds(:); 15 | end -------------------------------------------------------------------------------- /@Datamaster/getEntry.m: -------------------------------------------------------------------------------- 1 | function [entry] = getEntry(dm,varargin) 2 | %Function to retrieve directory entries from Datamaster 3 | %StartDate/EndData must be either a datetime or string of the format 4 | %MM/dd/uuuu/ 5 | 6 | %Run Search through getIndex 7 | index = dm.getIndex(varargin{:}); 8 | 9 | %% Create Entries from database 10 | query = sprintf(['SELECT ChannelLog.entryId, group_concat(ChannelName.channelName) FROM ChannelLog ',... 11 | 'INNER JOIN ChannelName ON ChannelName.id = ChannelLog.channelId ',... 
12 | 'WHERE ChannelLog.entryId IN (%s) GROUP BY ChannelLog.entryId'],... 13 | strjoin(sprintfc('%d',index),',')); 14 | ChannelLog = dm.mDir.fetch(query); 15 | 16 | MasterLog = dm.mDir.fetch(sprintf(['SELECT id, OriginHash, FinalHash, Datetime FROM masterDirectory ',... 17 | 'WHERE masterDirectory.id IN (%s)'], strjoin(sprintfc('%d',index),','))); 18 | 19 | % Create Entries from Logs 20 | entry = repmat(struct('Index', [], 'OriginHash', [], 'FinalHash', [], 'Channel', [], 'Detail', [], 'Datetime', []),[size(MasterLog,1),1]); 21 | 22 | %If no entries were found skip 23 | if ~isempty(entry) 24 | %Extract entryId from ChannelLog 25 | channelEntryId = [ChannelLog{:,1}]; 26 | 27 | %Preallocate MasterLogIndex for locating entries 28 | MasterLogIndex = [MasterLog{:,1}]; 29 | for i = 1:size(MasterLog, 1) 30 | %Add Channels to entry 31 | matchIndex = channelEntryId == index(i); %Find Record in ChannelLog 32 | record = ChannelLog(matchIndex,2); %Extract Channels from ChannelLog 33 | 34 | %Check if record is empty 35 | if ~isempty(record) 36 | record = textscan(record{:},'%s','Delimiter',','); %Extract channels to cell array 37 | entry(i).Channel = record{:}; %Extract inner cell array 38 | else 39 | entry(i).Channel = {}; 40 | end 41 | 42 | %Add from MasterLog 43 | masterIndex = MasterLogIndex == index(i); 44 | entry(i).Index = MasterLog{masterIndex,1}; 45 | entry(i).OriginHash = MasterLog{masterIndex,2}; 46 | entry(i).FinalHash = MasterLog{masterIndex,3}; 47 | entry(i).Datetime = MasterLog{masterIndex,4}; 48 | end 49 | end 50 | end -------------------------------------------------------------------------------- /@Datamaster/getIndex.m: -------------------------------------------------------------------------------- 1 | function [index] = getIndex(dm, varargin) 2 | %Gives the id of datasources in the masterDirectory that meet search criteria 3 | %Inputs: Key-value searches for the following: 4 | % Search by Details: Search for items logged in the .ldx file by a key value 
search for the Detail name 5 | % Example: 'Driver','alw224' -> Find datasources where 'alw224' was logged as the 'Driver' 6 | % Search by Date (StartDate, EndDate): Search for datasource before/after/between the provided date range 7 | % Inputs: StartDate, EndDate must be either datatime or use the format MM/dd/YYYY 8 | % Example: 'StartDate', '09/19/2016', 'EndDate', '09/25/2016' 9 | % Search by Channel: Find datasource with a given set of parameters 10 | % Input: channel must be a string (if one channel) or a cell array of strings 11 | % Example: 'channel', {'Engine_RPM', 'Throttle_Pos'} or 'channel', 'Engine_RPM' 12 | % 13 | % Output: 14 | % index: array of id integers for Datasource from masterDirectory 15 | 16 | 17 | %% Create Persistent Input Parser to handle reading inputs 18 | persistent p 19 | if isempty(p)% || true 20 | p = inputParser; 21 | p.FunctionName = 'getEntry'; 22 | addRequired(p,'dm',@(x) isa(x,'Datamaster')); 23 | addOptional(p,'Hash','',@(x) dm.validateHash(x)); 24 | 25 | %Add a Parameter for Each fieldname 26 | for i = 1:length(dm.Details) 27 | addParameter(p,dm.Details{i,1}, [], @(x) ischar(x) || iscell(x)); 28 | end 29 | 30 | % Add Parameter to search by channel 31 | addParameter(p,'channel', [], @(x) ischar(x) || iscell(x)); 32 | 33 | % Add a Parameter to a date range of interest 34 | addParameter(p,'StartDate', [], @(x) validateDatetime(x)); 35 | addParameter(p,'EndDate', [], @(x) validateDatetime(x)); 36 | 37 | % Add Parameters to control how many results are returned 38 | addParameter(p,'limit', [], @(x) isnumeric(x) && (x == round(x))); 39 | end 40 | 41 | %Parse Inputs and expand to vectors 42 | parse(p,dm,varargin{:}); 43 | Hash = p.Results.Hash; 44 | channel = p.Results.channel; 45 | StartDate = p.Results.StartDate; 46 | EndDate = p.Results.EndDate; 47 | 48 | %Force channel into a cell array 49 | if ~isa(channel,'cell') && ~isempty(channel) 50 | channel = {channel}; 51 | end 52 | 53 | if nargin == 1 54 | %If no arguments are supplied 
return everything 55 | index = dm.mDir.fetch('SELECT id FROM masterDirectory'); 56 | elseif ~strcmp(Hash,'') 57 | %Return Database entries for that contain the supplied hash 58 | if iscellstr(Hash) 59 | %Join hashes in to a list of hashes 60 | hashStr = strjoin(Hash,''','''); 61 | else %Hash is a string 62 | hashStr = Hash; 63 | end 64 | query = sprintf(['SELECT id FROM masterDirectory ',... 65 | 'WHERE masterDirectory.FinalHash IN (''%s'') OR ',... 66 | 'masterDirectory.OriginHash IN (''%s'')'],hashStr, hashStr); 67 | index = dm.mDir.fetch(query); 68 | 69 | else %Search by Request 70 | 71 | %Create Cell Array to store each sub query 72 | fullQuery = {}; 73 | 74 | %% Search in Field 75 | % Check if a search has been requested for each field 76 | query = cell(length(dm.Details),1); % Preallocate cell array to store queries 77 | for i = 1:length(dm.Details) 78 | if ~isempty(p.Results.(dm.Details{i})) 79 | % Search Field for Requested String 80 | query{i} = sprintf('(DetailName.fieldName = ''%s'' AND DetailLog.value LIKE ''%s'')',dm.Details{i},p.Results.(dm.Details{i})); 81 | end 82 | end 83 | 84 | %Remove empty queries 85 | query(cellfun(@isempty,query)) = []; 86 | 87 | % Create search query for details 88 | if ~isempty(query) 89 | %concatenate queries and add to fullQuery 90 | fullQuery{end+1} = sprintf(['SELECT masterDirectory.id FROM DetailLog ',... 91 | 'INNER JOIN DetailName ON DetailName.id = DetailLog.fieldId ',... 92 | 'INNER JOIN masterDirectory ON masterDirectory.id = DetailLog.entryId ',... 93 | 'WHERE %s'... 94 | 'GROUP BY entryId HAVING count(*) = %d'],strjoin(query,' OR '),length(query)); 95 | end 96 | 97 | 98 | %% Search by Datetime 99 | if ~isempty(StartDate) && ~isempty(EndDate) 100 | fullQuery{end+1} = sprintf(['SELECT masterDirectory.id FROM masterDirectory ',... 
101 | 'WHERE Datetime BETWEEN ''%s'' AND ''%s'''], StartDate, EndDate); 102 | elseif ~isempty(StartDate) 103 | fullQuery{end+1} = sprintf(['SELECT masterDirectory.id FROM masterDirectory ',... 104 | 'WHERE Datetime >= ''%s'''],StartDate); 105 | elseif ~isempty(EndDate) 106 | fullQuery{end+1} = sprintf(['SELECT masterDirectory.id FROM masterDirectory ',... 107 | 'WHERE Datetime <= ''%s'''],EndDate); 108 | end 109 | 110 | %% Search by Parameters 111 | if ~isempty(channel) 112 | fullQuery{end+1} = sprintf(['SELECT masterDirectory.id FROM channelLog ',... 113 | 'INNER JOIN ChannelName ON ChannelName.id = ChannelLog.channelId ',... 114 | 'INNER JOIN masterDirectory ON masterDirectory.id = ChannelLog.entryId ',... 115 | 'WHERE ChannelName.channelName IN (''%s'') ',... 116 | 'GROUP BY masterDirectory.id HAVING count(*) = %d'],... 117 | strjoin(channel, ''', '''),length(channel)); 118 | end 119 | 120 | 121 | %% Set Limit 122 | if ~isempty(p.Results.limit) 123 | fullQuery{end+1} = sprintf('SELECT masterDirectory.id FROM masterDirectory LIMIT %d', p.Results.limit); 124 | end 125 | 126 | %Combine Queries and get the list of Datasource that meet search criteria 127 | query = strjoin(fullQuery,' INTERSECT '); 128 | index = dm.mDir.fetch(query); 129 | end 130 | 131 | %Convert to numerical array 132 | if ~isempty(index) 133 | index = [index{:}]'; 134 | end 135 | end -------------------------------------------------------------------------------- /@Datamaster/private/connectSQLite.m: -------------------------------------------------------------------------------- 1 | function [SQLite_Database] = connectSQLite(dbpath) 2 | %connectSQLite connects to an SQLite Database and returns a database connection 3 | 4 | %Check if python is installed 5 | if isempty(pyversion) 6 | fprintf(['Please install python\n',... 7 | 'Be sure to check the option to "Add Python to Path"\n'... 
8 | 'Python can be found at the above link or www.python.org\n']); 9 | error('Datamaster requires python'); 10 | end 11 | 12 | %Check if dbpath exist 13 | if exist(dbpath,'file') == 2 14 | %Open Connection 15 | SQLite_Database = sqlite(dbpath); 16 | else 17 | %Report error 18 | error('Unable to Find Datastore: %s', dbpath); 19 | end 20 | 21 | -------------------------------------------------------------------------------- /@Datamaster/private/getGitInfo.m: -------------------------------------------------------------------------------- 1 | function gitInfo=getGitInfo(varargin) 2 | % Get information about the Git repository in the current directory, including: 3 | % - branch name of the current Git Repo 4 | % -Git SHA1 HASH of the most recent commit 5 | % -url of corresponding remote repository, if one exists 6 | % 7 | % The function first checks to see if a .git/ directory is present. If so it 8 | % reads the .git/HEAD file to identify the branch name and then it looks up 9 | % the corresponding commit. 10 | % 11 | % It then reads the .git/config file to find out the url of the 12 | % corresponding remote repository. This is all stored in a gitInfo struct. 13 | % 14 | % Note this uses only file information, it makes no external program 15 | % calls at all. 16 | % 17 | % This function must be in the base directory of the git repository 18 | % 19 | % Released under a BSD open source license. Based on a concept by Marc 20 | % Gershow. 21 | % 22 | % Andrew Leifer 23 | % Harvard University 24 | % Program in Biophysics, Center for Brain Science, 25 | % and Department of Physics 26 | % leifer@fas.harvard.edu 27 | % http://www.andrewleifer.com 28 | % 12 September 2011 29 | % 30 | % 31 | % 32 | 33 | % Copyright 2011 Andrew Leifer. All rights reserved. 34 | % 35 | % Redistribution and use in source and binary forms, with or without modification, are 36 | % permitted provided that the following conditions are met: 37 | % 38 | % 1. 
Redistributions of source code must retain the above copyright notice, this list of 39 | % conditions and the following disclaimer. 40 | % 41 | % 2. Redistributions in binary form must reproduce the above copyright notice, this list 42 | % of conditions and the following disclaimer in the documentation and/or other materials 43 | % provided with the distribution. 44 | % 45 | % THIS SOFTWARE IS PROVIDED BY ''AS IS'' AND ANY EXPRESS OR IMPLIED 46 | % WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 47 | % FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL OR 48 | % CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 49 | % CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 50 | % SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 51 | % ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 52 | % NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 53 | % ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 54 | % 55 | % The views and conclusions contained in the software and documentation are those of the 56 | % authors and should not be interpreted as representing official policies, either expressed 57 | % or implied, of . 58 | 59 | gitInfo=[]; 60 | if ~isempty(varargin) && length(varargin) ==1 61 | repoPath = fullfile(varargin{1},'.git'); 62 | else 63 | repoPath = '.git'; 64 | end 65 | 66 | if ~exist(repoPath,'file') || ~exist([repoPath '/HEAD'],'file') 67 | %Git is not present 68 | return 69 | end 70 | 71 | 72 | 73 | %Read in the HEAD information, this will tell us the location of the file 74 | %containing the SHA1 75 | text=fileread([repoPath '/HEAD']); 76 | parsed=textscan(text,'%s'); 77 | 78 | if ~strcmp(parsed{1}{1},'ref:') || ~(length(parsed{1})>1) %Fixed precedence: '~length(x)>1' parsed as '(~length(x))>1' and was always false 79 | %the HEAD is not in the expected format.
80 | %give up 81 | return 82 | end 83 | 84 | path=parsed{1}{2}; 85 | [pathstr, name, ext]=fileparts(path); 86 | branchName=name; 87 | 88 | %save branchname 89 | gitInfo.branch=branchName; 90 | 91 | 92 | %Read in SHA1 93 | SHA1text=fileread(fullfile(repoPath, pathstr,[name ext])); 94 | SHA1=textscan(SHA1text,'%s'); 95 | gitInfo.hash=SHA1{1}{1}; 96 | 97 | 98 | %Read in config file 99 | config=fileread([repoPath '/config']); 100 | %Find everything space delimited 101 | temp=textscan(config,'%s','delimiter','\n'); 102 | lines=temp{1}; 103 | 104 | remote=''; 105 | %Lets find the name of the remote corresponding to our branchName 106 | for k=1:length(lines) 107 | 108 | %Are we at the section describing our branch? 109 | if strcmp(lines{k},['[branch "' branchName '"]']) 110 | m=k+1; 111 | %While we haven't run out of lines 112 | %And while we haven't run into another section (which starts with 113 | % an open bracket) 114 | while (m<=length(lines) && ~strcmp(lines{m}(1),'[') ) 115 | temp=textscan(lines{m},'%s'); 116 | if length(temp{1})>=3 117 | if strcmp(temp{1}{1},'remote') && strcmp(temp{1}{2},'=') 118 | %This is the line that tells us the name of the remote 119 | remote=temp{1}{3}; 120 | end 121 | end 122 | 123 | m=m+1; 124 | end 125 | 126 | 127 | 128 | end 129 | end 130 | gitInfo.remote=remote; 131 | 132 | 133 | url=''; 134 | %Find the remote's url 135 | for k=1:length(lines) 136 | 137 | %Are we at the section describing our branch? 
138 | if strcmp(lines{k},['[remote "' remote '"]']) 139 | m=k+1; 140 | %While we haven't run out of lines 141 | %And while we haven't run into another section (which starts with 142 | % an open bracket) 143 | while (m<=length(lines) && ~strcmp(lines{m}(1),'[') ) 144 | temp=textscan(lines{m},'%s'); 145 | if length(temp{1})>=3 146 | if strcmp(temp{1}{1},'url') && strcmp(temp{1}{2},'=') 147 | %This is the line that tells us the name of the remote 148 | url=temp{1}{3}; 149 | end 150 | end 151 | 152 | m=m+1; 153 | end 154 | 155 | 156 | 157 | end 158 | end 159 | 160 | gitInfo.url=url; -------------------------------------------------------------------------------- /@Datamaster/private/getKeyValue.m: -------------------------------------------------------------------------------- 1 | function Value = getKeyValue(filename, key) 2 | Value = []; %Preallocate so a missing key returns [] instead of raising an unassigned-output error (matches the nested getKeyValue in getConfigSetting.m) 3 | %Open File 4 | fid = fopen(filename); 5 | 6 | %Read in line by line 7 | while ~feof(fid) 8 | lineText = fgetl(fid); 9 | 10 | %Check for key 11 | if regexpi(lineText, sprintf('%s=', key)) 12 | %Extract Value 13 | Value = regexpi(lineText, '=(.+)', 'tokens'); 14 | Value = Value{:}{:}; 15 | break 16 | end 17 | end 18 | 19 | %Close Config File 20 | fclose(fid); -------------------------------------------------------------------------------- /@Datamaster/private/reportGitInfo.m: -------------------------------------------------------------------------------- 1 | function reportGitInfo() 2 | %Report Current Version info 3 | gitInfo = getGitInfo(Datamaster.getPath); 4 | if ~isempty(gitInfo) 5 | fprintf('Version: %s - %s\n',gitInfo.branch, gitInfo.hash); 6 | fprintf('Host: %s\n',gitInfo.url); 7 | else 8 | warning('You are not using git and will miss out on the latest updates'); 9 | end 10 | end -------------------------------------------------------------------------------- /@Datamaster/private/validateDatetime.m: -------------------------------------------------------------------------------- 1 | function valid = validateDatetime(datestr) 2 |
%Function to validate that a string is a valid datetime to pass to 3 | %sqlite 4 | 5 | %Assume string is invalid 6 | valid = false; 7 | 8 | %Assert that datestr is a string 9 | if ~ischar(datestr) 10 | return 11 | end 12 | 13 | %Parse the dateString into a date vector 14 | %ie. {YYYY, MM, DD, HH, MM} 15 | dateVec = regexp(datestr,... 16 | '(\d{4})-(\d{1,2})-(\d{1,2})(?> (\d{1,2}):(\d{1,2}))?', 'tokens'); 17 | 18 | %Assert that something matched 19 | if isempty(dateVec) 20 | return 21 | else 22 | %Extract Results 23 | dateVec = dateVec{:}; 24 | end 25 | 26 | %Validate magnitudes 27 | dateVec = str2double(dateVec); 28 | 29 | %Check months 30 | if ~(dateVec(2) <= 12 && dateVec(2) >= 1) 31 | return 32 | end 33 | 34 | %Check Days 35 | if ~(dateVec(3) <= 31 && dateVec(3) >= 1) 36 | return 37 | end 38 | 39 | %If hours and minutes are supplied also check 40 | if length(dateVec) == 5 41 | %Check Hours 42 | if ~(dateVec(4) <= 24 && dateVec(4) >= 0) 43 | return 44 | end 45 | 46 | %Check Minutes 47 | if ~(dateVec(5) <= 59 && dateVec(5) >= 0) 48 | return 49 | end 50 | end 51 | 52 | %All checks passed -> valid datestring 53 | valid = true; 54 | 55 | -------------------------------------------------------------------------------- /@Datamaster/private/validateHash.m: -------------------------------------------------------------------------------- 1 | function valid = validateHash(dm, hash) 2 | %Validate the supplied hash is either a string of 64 chars or a cell 3 | %array of valid hashes 4 | 5 | valid = true; 6 | if iscellstr(hash) 7 | %Check that each hash in the cell array is valid 8 | i = 1; 9 | while i <= length(hash) && valid %Fixed off-by-one: 'i < length(hash)' never validated the last (or only) hash 10 | valid = dm.validateHash(hash{i}) && valid; 11 | i = i+1; 12 | end 13 | elseif ischar(hash) 14 | %Valid hashes are of length 64 15 | if length(hash) ~= 64 16 | valid = false; 17 | return 18 | end 19 | 20 | %Valid Hashes Exist have entries in the Master Directory 21 | if length(dm.mDir.fetch(sprintf(['SELECT id FROM masterDirectory ',...
22 | 'WHERE OriginHash = ''%s'' OR FinalHash = ''%s'''],... 23 | hash, hash))) ~=1 24 | valid = false; 25 | end 26 | else 27 | %Not a valid input 28 | valid = false; 29 | end 30 | end -------------------------------------------------------------------------------- /@datasource/CDF2.m: -------------------------------------------------------------------------------- 1 | function [cdf_2, x, y, duration] = CDF2(ds,varargin) 2 | %Function to generate a CDF along the y axis of a bivariate histogram 3 | %of the specified channel for all supplied datasources 4 | 5 | persistent p 6 | if isempty(p) %Fixed: '~isempty(p) || true' was always true, so the persistent parser was rebuilt on every call 7 | p = inputParser; 8 | p.FunctionName = 'CDF2'; %Fixed copy-paste: was 'Histogram2' 9 | p.addRequired('ds', @(x) isa(x,'datasource')); 10 | p.addRequired('chanNameX', @(x) ischar(x)); 11 | p.addRequired('chanNameY', @(x) ischar(x)); 12 | p.addRequired('Range', @(x) isfloat(x) && all(size(x)==[2,2])); 13 | p.addParameter('unit',{'base', 'base'}, @(x) iscell(x) && numel(x) == 2); 14 | p.addParameter('nBins', [50,50], @(x) isfloat(x) && length(x)==2); 15 | p.addParameter('ContourLevel', [0.99, 0.975, 0.95], @isnumeric); 16 | end 17 | 18 | %Parse Input 19 | parse(p,ds,varargin{:}); 20 | ds = p.Results.ds; 21 | chanNameX = p.Results.chanNameX; 22 | chanNameY = p.Results.chanNameY; 23 | nBins = p.Results.nBins; 24 | Range = p.Results.Range; 25 | ContourLevel = p.Results.ContourLevel; 26 | 27 | %Assert that some datasource match 28 | assert(~isempty(ds),'No Matching Datasources Found'); 29 | 30 | %Initialize arrays 31 | edgesX = linspace(Range(1,1),Range(1,2),nBins(1)+1); 32 | edgesY = linspace(Range(2,1),Range(2,2),nBins(2)+1); 33 | 34 | %% Process Datasource 35 | [count,duration] = mapReduce(ds, @mapFun,...
36 | @reduceFun, {chanNameX, chanNameY}); 37 | 38 | %Define mapFun 39 | function [count, duration] = mapFun(ds) 40 | %Load Required Channels and sync sampling Rates 41 | ds.loadChannel({chanNameX, chanNameY}); 42 | ds.Sync; 43 | 44 | %Get Channels 45 | channelX = ds.getChannel(chanNameX, 'unit', p.Results.unit{1}).Value; 46 | channelY = ds.getChannel(chanNameY, 'unit', p.Results.unit{2}).Value; 47 | 48 | count = histcounts2(channelX,channelY,edgesX,edgesY); 49 | duration = range(ds.getChannel(chanNameX).Time); 50 | end 51 | 52 | %Define Reduce Function 53 | function [count, duration] = reduceFun(count, duration) 54 | count = sum(cat(3,count{:}),3)'; 55 | duration = sum([duration{:}]); 56 | end 57 | 58 | %Compute Center Locations of each bin 59 | x = (edgesX(1:end-1) + edgesX(2:end))/2; 60 | y = (edgesY(1:end-1) + edgesY(2:end))/2; 61 | 62 | %Create CDF Curves - Power 63 | cdf_2 = bsxfun(@rdivide, cumsum(count), sum(count)); 64 | cdf_2(isnan(cdf_2)) = inf; 65 | 66 | % Plot Contours 67 | gcf; box on 68 | Datamaster.colormap('warm') 69 | hold on 70 | for i =1:length(ContourLevel) 71 | contour(x, y, cdf_2, 'LevelList', ContourLevel(i)); 72 | end 73 | caxis([0.95*min(ContourLevel), max(ContourLevel)]); 74 | hold off 75 | 76 | %Label 77 | xlabel(sprintf('%s [%s]',chanNameX, p.Results.unit{1}),'interpreter','none') 78 | ylabel(sprintf('%s [%s]',chanNameY, p.Results.unit{2}),'interpreter','none') 79 | title(sprintf('Based on %3.2f hrs of data',duration/3600)); 80 | end -------------------------------------------------------------------------------- /@datasource/Histogram.m: -------------------------------------------------------------------------------- 1 | function [count,ax] = Histogram(ds,varargin) 2 | %Function to generate a histogram of the specified channel for all supplied datasources 3 | 4 | persistent p 5 | if isempty(p) %Fixed: '~isempty(p) || true' was always true, so the persistent parser was rebuilt on every call 6 | p = inputParser; 7 | p.FunctionName = 'Histogram'; 8 | p.addRequired('ds', @(x) isa(x,'datasource')); 9 | p.addRequired('chanName',
@(x) ischar(x)); 10 | p.addRequired('Range', @(x) isfloat(x) && length(x)==2); 11 | p.addParameter('unit', 'base', @ischar); 12 | p.addParameter('nBins', 50, @isfloat); 13 | p.addParameter('Normalization', 'pdf',... 14 | @(x) any(strcmp(x,{'count','probability','pdf'}))); 15 | end 16 | 17 | %Parse Input 18 | parse(p,ds,varargin{:}); 19 | ds = p.Results.ds; 20 | chanName = p.Results.chanName; 21 | nBins = p.Results.nBins; 22 | Range = p.Results.Range; 23 | 24 | %Assert that some datasource match 25 | assert(~isempty(ds),'No Matching Datasources Found'); 26 | 27 | %Initialize arrays 28 | edges = linspace(Range(1),Range(2),nBins+1); 29 | 30 | %% Process Datasource 31 | [count,duration] = mapReduce(ds, @mapFun, @reduceFun, chanName); 32 | 33 | %Define mapFun 34 | function [count, duration] = mapFun(ds) 35 | channel = ds.getChannel(chanName, 'unit', p.Results.unit); 36 | count = histcounts(channel.Value,edges); 37 | duration = range(channel.Time); 38 | end 39 | 40 | %Define Reduce Function 41 | function [count, duration] = reduceFun(count, duration) 42 | count = sum(cell2mat(count)); 43 | duration = sum([duration{:}]); 44 | end 45 | 46 | %% Normalize Counts 47 | switch p.Results.Normalization 48 | case 'pdf' 49 | count = count ./ (sum(count) * (range(Range)/nBins)); 50 | ylabel('Proablility Density'); 51 | case 'probability' 52 | count = count ./ sum(count); 53 | ylabel('Probability'); 54 | case 'count' 55 | %Do Nothing 56 | ylabel('Count'); 57 | end 58 | 59 | %% Plot the histogram 60 | ax = gca; 61 | hold on; box on 62 | xBarPoints = (edges(1:end-1) + edges(2:end))/2; 63 | bar(ax,xBarPoints,count,'histc'); 64 | 65 | %Label Histogram 66 | xlabel(sprintf('%s [%s]',chanName, p.Results.unit),'interpreter','none') 67 | ylabel(p.Results.Normalization); 68 | title(sprintf('Based on %3.2f hrs of data',duration/3600)); 69 | 70 | %Update axis 71 | axis normal 72 | hold off 73 | end -------------------------------------------------------------------------------- 
--------------------------------------------------------------------------------
/@datasource/Histogram2.m:
--------------------------------------------------------------------------------
function [count,h,ax] = Histogram2(ds,varargin)
%Generate a 2D histogram of two channels for all supplied datasources.
%
%   count - binned (and normalized) counts, log10 scaled
%   h     - handle to the filled contour object
%   ax    - handle to the axes drawn on
%
%Required: ds, chanNameX, chanNameY, Range (2x2, rows are [min max] for X/Y)
%Name/Value: 'unit' ({xUnit,yUnit}, default {'base','base'}),
%            'nBins' ([nx ny], default [50 50]),
%            'normalization' - 'count' | 'probability' | 'pdf' (default)

%Build the input parser once (persistent cache; the old guard
%"~isempty(p) || true" was always true and rebuilt it on every call)
persistent p
if isempty(p)
    p = inputParser;
    p.FunctionName = 'Histogram2';
    p.addRequired('ds', @(x) isa(x,'datasource'));
    p.addRequired('chanNameX', @(x) ischar(x));
    p.addRequired('chanNameY', @(x) ischar(x));
    p.addRequired('Range', @(x) isfloat(x) && all(size(x)==[2,2]));
    p.addParameter('unit',{'base', 'base'}, @(x) iscell(x) && numel(x) == 2);
    p.addParameter('nBins', [50,50], @(x) isfloat(x) && length(x)==2);
    p.addParameter('normalization', 'pdf',...
        @(x) any(strcmp(x,{'count','probability','pdf'})));
end

%Parse Input
parse(p,ds,varargin{:});
ds = p.Results.ds;
chanNameX = p.Results.chanNameX;
chanNameY = p.Results.chanNameY;
nBins = p.Results.nBins;
Range = p.Results.Range;

%Assert that some datasource match
assert(~isempty(ds),'No Matching Datasources Found');

%Set Unit
if isempty(p.Results.unit)
    %Units unset by User -> Default to first Datasource's Units
    %BUGFIX: both defaults now come from ds(1); the old code indexed ds(2)
    %for the Y unit, which fails when only one datasource is supplied.
    unit{1} = ds(1).getChannel(chanNameX).Units;
    unit{2} = ds(1).getChannel(chanNameY).Units;
else
    unit = p.Results.unit;
end

%Initialize bin edges
edgesX = linspace(Range(1,1),Range(1,2),nBins(1)+1);
edgesY = linspace(Range(2,1),Range(2,2),nBins(2)+1);

%% Process Datasource
[count,duration] = mapReduce(ds, @mapFun,...
    @reduceFun, {chanNameX, chanNameY});

    %mapFun: 2D-bin a single datasource
    function [count, duration] = mapFun(ds)
        %Load Required Channels and sync sampling Rates
        ds.loadChannel({chanNameX, chanNameY});
        ds.Sync;

        %Get Channels (use the resolved unit cell, not p.Results, so the
        %defaulted-units branch above is honored)
        channelX = ds.getChannel(chanNameX, 'unit', unit{1}).Value;
        channelY = ds.getChannel(chanNameY, 'unit', unit{2}).Value;

        count = histcounts2(channelX,channelY,edgesX,edgesY);
        duration = range(ds.getChannel(chanNameX).Time);
    end

    %reduceFun: sum the per-datasource count matrices and durations
    function [count, duration] = reduceFun(count, duration)
        count = sum(cat(3,count{:}),3);
        duration = sum([duration{:}]);
    end

%% Normalize Counts
switch p.Results.normalization
    case 'pdf'
        %Compute Area of each bin
        cellArea = range(Range(1,:))*range(Range(2,:));
        cellArea = cellArea / numel(count);

        %Norm by number of bins and area of each bin
        count = count ./(sum(count(:))*cellArea);

        %Set label for colorbar
        cBarLabel = 'log10(Probability Density)';
    case 'probability'
        count = count ./ sum(sum(count));

        %Set label for colorbar
        cBarLabel = 'log10(Probability)';
    case 'count'
        %Set label for colorbar
        cBarLabel = 'log10(Count)';
end

%Due to the wide range of orders of magnitude-> take the log
count = log10(count);

%Plot the histogram and turn off contour lines
xBarPoints = (edgesX(1:end-1) + edgesX(2:end))/2;
yBarPoints = (edgesY(1:end-1) + edgesY(2:end))/2;
[~,h] = contourf(xBarPoints,yBarPoints,count'); %Transpose as contourf and histcounts define x differently
h.LineStyle = 'none';
box on

%BUGFIX: ax is a declared output but was never assigned
ax = gca;

%Add and label the colorbar
cBar = colorbar; Datamaster.colormap('warm');
ylabel(cBar,cBarLabel, 'FontSize', 12);

%Set the color axis to something reasonable
%(exclude NaN as well as Inf, matching Transient.m; NaN bins arise from
%0/0 during pdf normalization and would poison mean/std)
valid = ~isinf(count) & ~isnan(count);
m = mean(count(valid)); s = std(count(valid));
caxis([m-3*s, m+3*s]);

%Label Histogram
xlabel(sprintf('%s [%s]',chanNameX,unit{1}),'interpreter','none')
ylabel(sprintf('%s [%s]',chanNameY,unit{2}),'interpreter','none')
title(sprintf('Based on %3.2f hrs of data',duration/3600));
end
--------------------------------------------------------------------------------
/@datasource/Sync.m:
--------------------------------------------------------------------------------
function timeNew = Sync(ds, varargin)
%Synchronizes the sampling rate of all opened channels.
%
%   timeNew = Sync(ds)            resample onto the fastest channel's rate
%   timeNew = Sync(ds, syncType)  syncType: 'fast' | 'slow' | 'cheap'
%
%All loaded channels are linearly interpolated onto one common time
%vector covering the interval shared by every channel.

%Create Input Parser (built once; persistent across calls)
persistent p
if isempty(p)
    p = inputParser;
    p.FunctionName = 'Sync';

    %ds must be a datasource with at least one channel loaded
    addRequired(p,'ds',@(x) (isa(x,'datasource') && ~isempty(x.Data)));

    %syncType must be one of the available types
    addOptional(p,'syncType','fast', @(x) any(strcmp(x, {'fast', 'slow', 'cheap'})));
end

%Read in inputs
parse(p,ds,varargin{:});
ds = p.Results.ds;
syncType = p.Results.syncType;

%% Identify the sampling period of each channel
channels = fieldnames(ds.Data);
samplePeriod = zeros(length(channels),1);
timeStart = 0; timeEnd = inf;
for i = 1:length(channels)
    %Compute the average sample period
    samplePeriod(i) = mean(diff(ds.Data.(channels{i}).Time));

    %Find the inclusive start and stop times
    timeStart = max(timeStart, ds.Data.(channels{i}).Time(1));
    timeEnd = min(timeEnd, ds.Data.(channels{i}).Time(end));
end

%% Decide what the new sampling period will be
switch syncType
    case 'fast'
        % The Fastest sampling rate has the smallest period
        newSamplePeriod = min(samplePeriod);
    case 'slow'
        % The Slowest sampling rate has the largest period
        newSamplePeriod = max(samplePeriod);
    case 'cheap'
        % The cheapest sampling rate to compute will re-sample the fewest channels
        newSamplePeriod = mode(samplePeriod);
    otherwise
        %Unreachable in practice: the parser already validated syncType
        errorStruct.message = sprintf('%s is not a valid syncType',syncType);
        errorStruct.identifier = 'Datamaster:datasource:Sync';
        error(errorStruct);
end

%Create a new inclusive time vector at the new sampling rate
timeNew = timeStart:newSamplePeriod:timeEnd;

%% Re-sample Channels as needed
for i = 1:length(channels)
    % Re-sample each channel onto the common time vector
    ds.Data.(channels{i}).Value = interp1(...
        ds.Data.(channels{i}).Time,...
        ds.Data.(channels{i}).Value,...
        timeNew);
    ds.Data.(channels{i}).Time = timeNew;
end

--------------------------------------------------------------------------------
/@datasource/TimePlot.m:
--------------------------------------------------------------------------------
function ax = TimePlot(ds,varargin)
%Plot a logged channel against time and label the axes.
%
%   ax = TimePlot(ds, chanName)
%   ax = TimePlot(ds, chanName, ax, 'unit', unit)

%Create Input Parser Object (built once - persistent)
persistent p
if isempty(p)
    p = inputParser;
    p.FunctionName = 'TimePlot';
    p.addRequired('ds',@(x) isa(x,'datasource')); %ds must be a single datasource
    p.addRequired('chanName',@(x) ischar(x));
    %BUGFIX: required arguments must be declared before optional ones, and
    %the class name passed to isa must be a quoted string. The default is
    %[] (resolved to gca at plot time) so that merely building the parser
    %does not create a figure as a side effect.
    p.addOptional('ax',[], @(x) isa(x,'matlab.graphics.axis.Axes'));
    p.addParameter('unit', 'base', @ischar);
end

parse(p,ds,varargin{:});
ds = p.Results.ds;
chanName = p.Results.chanName;
unit = p.Results.unit;
ax = p.Results.ax;

%Default to the current axes when none was supplied
if isempty(ax)
    ax = gca;
end

%Load required channels
ds.loadChannel(chanName);

%Plot the Channel
channel = ds.getChannel(chanName, 'unit', unit);
plot(ax, channel.Time,channel.Value);

%Annotate Plot
xlabel('Time [s]')
ylabel(sprintf('[%s]',unit));
--------------------------------------------------------------------------------
/@datasource/Transient.m:
--------------------------------------------------------------------------------
function Transient(ds, varargin)
%Generates Oil Pressure vs Time above idle to assess oil return

persistent p
if isempty(p)
    %Parser is persistent and built only on the first call. (The old guard
    %"~isempty(p) || true" was always true and rebuilt it every call.)
    p = inputParser;
    %BUGFIX: FunctionName previously read 'OilReturnRate'; aligned with the
    %actual function name so parser error messages reference this function.
    p.FunctionName = 'Transient';
    p.addRequired('ds', @(x) isa(x,'datasource'));
    p.addRequired('TrigChannel', @(x) ischar(x));
    p.addRequired('Threshold', @isnumeric);
    p.addRequired('TrigUnit', @ischar);
    p.addRequired('DataChannel', @(x) ischar(x));
    p.addRequired('Range', @(x) isfloat(x) && length(x)==2);
    p.addRequired('DataUnit', @ischar);
    p.addParameter('nBins', 50, @isfloat);
    p.addParameter('SampleRate', 0.5, @isfloat);
    p.addParameter('Normalization', 'pdf',...
        @(x) any(strcmp(x,{'count','probability','pdf'})));
end

%Parse Inputs
parse(p, ds, varargin{:});
in = p.Results;

%Initialize bin edges for the data axis
dataEdges = linspace(in.Range(1), in.Range(2), in.nBins+1);

%% Process Datasource
[count, timeEdges, duration] = mapReduce(ds, @mapFun, @reduceFun,...
    {in.TrigChannel, in.DataChannel});

    %mapFun: build a (time-above-threshold x data-value) 2D histogram for
    %one datasource
    function [count, duration] = mapFun(ds)
        %Load Channels and synchronize their sampling rates
        ds.loadChannel({in.TrigChannel, in.DataChannel}); ds.Sync;
        trigger = ds.getChannel(in.TrigChannel, 'unit', in.TrigUnit);
        channel = ds.getChannel(in.DataChannel, 'unit', in.DataUnit);

        %% Create trigger timer (Time since last trigger)
        timeData = trigger.Time;
        timeTrig = zeros(1, length(timeData));

        trip = find(trigger.Value > in.Threshold);

        %Check if Threshold is reached; report nothing for this datasource
        %if it never trips (reduceFun tolerates empty entries)
        if isempty(trip)
            count = [];
            duration = [];
            return
        end

        %Handle trip(1) = 1 case
        if trip(1) == 1
            timeTrig(1) = 1;
            trip(1) = [];
        end

        %Accumulate elapsed time while the trigger stays above threshold;
        %timeTrig holds zero for samples below the threshold
        dataSampleRate = mean(diff(channel.Time));
        for i = 1:length(trip)
            timeTrig(trip(i)) = timeTrig(trip(i)-1) + dataSampleRate;
        end

        %% Bin Data

        %Set Edges for time bin
        %Note: all data will land on the left edge of the bin
        timeRange = 0:in.SampleRate:max([timeTrig, 2*in.SampleRate]);

        % Run Binning
        count = histcounts2(timeTrig, channel.Value, timeRange, dataEdges);
        duration = range(channel.Time);
    end

    %reduceFun: sum count matrices that may differ in row count (each
    %datasource's longest excursion sets its own row count)
    function [count, timeEdges, duration] = reduceFun(count, duration)
        %Find max count size
        maxRow = 1;
        for i = 1:length(count)
            maxRow = max(maxRow, size(count{i}, 1));
        end
        sumCount= zeros(maxRow, length(dataEdges)-1);

        %Accumulate counts, zero-padding the shorter matrices
        for i = 1:length(count)
            curSize = size(count{i});
            sumCount(1:curSize(1), 1:curSize(2)) = ...
                count{i} + sumCount(1:curSize(1), 1:curSize(2));
        end
        count = sumCount;

        timeEdges = 0:in.SampleRate:(maxRow*in.SampleRate);

        %Total Duration
        duration = sum([duration{:}]);
    end

%% Normalize Counts
switch in.Normalization
    case 'pdf'
        %Compute Size of each bin
        cellSize = range(in.Range) / size(count,2);

        %Norm by number of bins and area of each bin
        count = bsxfun(@rdivide, count, sum(count,2)*cellSize);

        %Set label for colorbar
        cBarLabel = 'log10(Probability Density)';
    case 'probability'
        count = count ./ sum(sum(count));

        %Set label for colorbar
        cBarLabel = 'log10(Probability)';
    case 'count'
        %Set label for colorbar
        cBarLabel = 'log10(Count)';
end

%Due to the wide range of orders of magnitude-> take the log
count = log10(count);

%Plot the histogram and turn off contour lines
tBarPoints = timeEdges(1:end-1);
yBarPoints = (dataEdges(1:end-1) + dataEdges(2:end))/2;
[~,h] = contourf(tBarPoints,yBarPoints,count'); %Transpose as contourf and histcounts define x differently
h.LineStyle = 'none';
box on

%Add and label the colorbar
cBar = colorbar; Datamaster.colormap('warm');
ylabel(cBar,cBarLabel, 'FontSize', 12);

%Set the color axis to something reasonable
valid = ~isinf(count) & ~isnan(count);
m = mean(count(valid)); s = std(count(valid));
caxis([m-3*s, m+3*s]);

%Label Histogram
xlabel('Time [s]','interpreter','none')
ylabel(sprintf('%s [%s]', in.DataChannel, in.DataUnit), 'interpreter','none')

%Set Title
durTitle = sprintf('Based on %3.2f hrs of data', duration/3600);
trigTitle = sprintf('Threshold: %s > %f %s', in.TrigChannel,...
    in.Threshold, in.TrigUnit);
title({durTitle, trigTitle}, 'interpreter','none');
end
--------------------------------------------------------------------------------
/@datasource/datasource.m:
--------------------------------------------------------------------------------
classdef datasource < handle
    %Class for accessing data stored in datasources

    properties (Access = private)
        Index = []; %Location of Entry in datastore
        Data = struct; %Structure of Logged Data
        Entry = struct; %Structure of masterDirectory
        Channel = {}; %Cell Array of logged channels
        Detail = []; %Structure of Details
        dm = []; %Handle to Datamaster Object
        MatPath = ''; %Full path to .mat file
    end

    methods
        function obj = datasource(dm,Entry)
            obj.dm = dm;
            obj.Entry = Entry;
            obj.Index = Entry.Index;

            obj.MatPath = fullfile(dm.getDatastore,[Entry.FinalHash '.mat']);
        end

        %Access Methods
        function channels = getLogged(ds)
            channels = ds.Entry.Channel(:);
        end

        function entry = getEntry(ds)

            %Assert that ds is singular
            assert(length(ds) ==1, 'getDetails only supports singular datasources');

            %Return Entry
            entry = ds.Entry;
        end

        function clearData(ds,varargin)
            %Used to clear channel data loaded into memory by the datasource.
39 | 40 | %Assert that ds is singular 41 | assert(length(ds) ==1, 'clearData only supports singular datasources'); 42 | 43 | switch nargin 44 | case 1 45 | %Clear Loaded Data from memory 46 | ds.Data = struct; 47 | case 2 48 | if isfield(ds.Data,varargin{1}) 49 | ds.Data = rmfield(ds.Data,varargin{1}); 50 | end 51 | end 52 | end 53 | 54 | %Public Function Signatures 55 | channel = getChannel(ds,chanName,varargin) 56 | 57 | TimePlot(ds,varargin) 58 | 59 | openInMoTeC(ds) 60 | 61 | loadChannel(ds,channelNames) 62 | 63 | detail = getDetail(ds,Detail) 64 | 65 | duration = driveTime(ds,varargin) 66 | 67 | newTime = Sync(varargin) 68 | 69 | varargout = mapReduce(ds, mapFun, reduceFun, varargin) 70 | 71 | [cdf_2, x, y, duration] = CDF2(ds,varargin) 72 | end 73 | 74 | end 75 | 76 | -------------------------------------------------------------------------------- /@datasource/driveTime.m: -------------------------------------------------------------------------------- 1 | function duration = driveTime(ds,varargin) 2 | %Compute the Total time logged on the engine in seconds 3 | 4 | duration = 0; 5 | 6 | % Lop over Log Files 7 | for i = 1:length(ds) 8 | try 9 | duration = duration + range(ds(i).getChannel('Engine_RPM').Time); 10 | catch e 11 | warning(e.identifier,e.message); 12 | end 13 | end -------------------------------------------------------------------------------- /@datasource/getChannel.m: -------------------------------------------------------------------------------- 1 | function channel = getChannel(ds,chanName,varargin) 2 | 3 | %Validate Channel Names 4 | persistent p 5 | if ~isempty(p) || true 6 | p = inputParser; 7 | p.FunctionName = 'getChannel'; 8 | p.addRequired('ds',@(x) isa(x,'datasource') && length(x)==1); 9 | p.addRequired('chanName',@(x) ischar(x) || iscell(x)); 10 | p.addOptional('filter', 'none', @(x) any(strcmp(x,{'none','hampel','median'}))); 11 | p.addOptional('unit', 'base', @ischar); 12 | end 13 | 14 | %Extract Parameters 15 | 
parse(p,ds,chanName,varargin{:}); 16 | ds = p.Results.ds; 17 | chanName = p.Results.chanName; 18 | filter = p.Results.filter; 19 | 20 | if isa(chanName,'cell') 21 | for i = 1:length(chanName) 22 | channel.(chanName{i}) = ds.getChannel(chanName{i}); 23 | end 24 | else 25 | %% Check if Channel has been loaded or filtering is set 26 | if ~isfield(ds.Data,chanName) || ~strcmp(filter,'none') 27 | ds.clearData(chanName); 28 | ds.loadChannel(chanName); 29 | end 30 | 31 | %% Apply Filtering 32 | if ~isempty(filter) 33 | switch filter 34 | case 'hampel' 35 | %Number of samples on either side to be used when computing the std and median 36 | k = 13; 37 | 38 | %Number of Standard of deviations a sample must deviate to be an outlier 39 | nSigma = 3; 40 | 41 | %Apply Filter 42 | ds.Data.(chanName).Value = hampel(ds.Data.(chanName).Value,k,nSigma); 43 | case 'median' 44 | %Number of samples on either side to be used when computing the median 45 | n = 13; 46 | 47 | %Apply Filter 48 | ds.Data.(chanName).Value = medfilt1(ds.Data.(chanName).Value,n); 49 | end 50 | end 51 | 52 | %% Convert Units 53 | if ~strcmp(p.Results.unit,'') 54 | %Convert Unit 55 | [value, unit] = convertUnit(ds.Data.(chanName).Value,... 56 | ds.Data.(chanName).Units,... 
57 | p.Results.unit); 58 | 59 | %Store Conversion to channel data 60 | ds.Data.(chanName).Value = value; 61 | ds.Data.(chanName).Units = unit; 62 | end 63 | 64 | %Return Channel 65 | channel = ds.Data.(chanName); 66 | end 67 | 68 | end 69 | 70 | function valid = validateChannel(ds,channel) 71 | valid = true; 72 | if isa(channel,'cell') 73 | i = 1; 74 | while i <= length(channel) && valid 75 | %Recursively Validate Each entry in the cell 76 | valid = valid && validateChannel(ds,channel{i}); 77 | i = i+1; 78 | end 79 | elseif isa(channel,'char') 80 | %Check if Channel was logged 81 | valid = any(strcmp(channel,ds.getLogged())); 82 | else 83 | valid = false; 84 | end 85 | end -------------------------------------------------------------------------------- /@datasource/getDetail.m: -------------------------------------------------------------------------------- 1 | function value = getDetail(ds, fieldName) 2 | %Get a specific detail from Detail Log 3 | 4 | %Assert that ds is singular 5 | assert(length(ds) ==1, 'getDetails only supports singular datasources'); 6 | 7 | %Load Details if missing 8 | if isempty(ds.Detail) 9 | %Get Logged Details 10 | DetailLog = ds.dm.mDirFetch(sprintf(['SELECT DetailLog.entryId, '... 11 | 'DetailName.fieldName, DetailLog.value, DetailLog.unit FROM DetailLog ',... 12 | 'INNER JOIN DetailName ON DetailName.id = DetailLog.fieldId ',... 
13 | 'WHERE DetailLog.entryId IN (%s)'], strjoin(sprintfc('%d',ds.Entry.Index),','))); 14 | 15 | %Add DetailLog records to Details 16 | for j = 1:length(DetailLog) 17 | if isempty(DetailLog{j,4}) 18 | ds.Detail.(DetailLog{j,2}) = DetailLog{j,3}; 19 | else 20 | ds.Detail.(DetailLog{j,2}).Value = DetailLog{j,3}; 21 | ds.Detail.(DetailLog{j,2}).Unit = DetailLog{j,4}; 22 | end 23 | end 24 | end 25 | 26 | %Check if Detail exist 27 | if isfield(ds.Detail, fieldName) 28 | value = ds.Detail.(fieldName); %Return Detail 29 | else 30 | value = ''; %Detail is missing return empty 31 | end 32 | end -------------------------------------------------------------------------------- /@datasource/loadChannel.m: -------------------------------------------------------------------------------- 1 | function loadChannel(ds,channelNames) 2 | %Force channel names into a cell array 3 | if ~iscell(channelNames) 4 | channelNames = {channelNames}; 5 | end 6 | 7 | %Loop over each datasource 8 | for i = 1:length(ds) 9 | %Find missing channels 10 | isMissing = ~isfield(ds(i).Data,channelNames); 11 | 12 | %Load Missing Channels 13 | if any(isMissing) 14 | newData = load(ds(i).MatPath,channelNames{isMissing}); 15 | 16 | %Check that missing was loaded 17 | assert(all(isfield(newData, channelNames(isMissing))),... 18 | 'Channel Not Logged'); 19 | 20 | %Append to Data 21 | vars = fieldnames(newData); 22 | for j = 1:length(vars) 23 | ds(i).Data.(vars{j}) = newData.(vars{j}); 24 | 25 | %Replace ° with def 26 | ds(i).Data.(vars{j}).Units = ... 
27 | strrep(newData.(vars{j}).Units, '°', 'deg'); 28 | end 29 | end 30 | end 31 | end -------------------------------------------------------------------------------- /@datasource/mapReduce.m: -------------------------------------------------------------------------------- 1 | function varargout = mapReduce(ds, mapFun, reduceFun, varargin) 2 | %Function for applying a function (mapFun) to each datasource 3 | 4 | %Create Input Parser 5 | persistent p 6 | if isempty(p) 7 | p = inputParser; 8 | p.FunctionName = 'mapReduce'; 9 | p.addRequired('ds', @(x) isa(x, 'datasource') && ~isempty(x)); 10 | p.addRequired('mapFun', @(x) isa(x, 'function_handle') && nargin(x) == 1); 11 | p.addRequired('reduceFun', @(x) isa(x, 'function_handle')); 12 | p.addOptional('channel', [], @(x) iscellstr(x) || ischar(x)) 13 | end 14 | 15 | %Process Inputs 16 | parse(p, ds, mapFun, reduceFun, varargin{:}); 17 | ds = p.Results.ds; 18 | mapFun = p.Results.mapFun; 19 | reduceFun =p.Results.reduceFun; 20 | 21 | %Grab datamaster from one of the datasources 22 | dm = ds(1).dm; 23 | 24 | %% Check for repeated required channels 25 | if ~iscell(p.Results.channel) 26 | ReqChannel = {p.Results.channel}; 27 | else 28 | ReqChannel = p.Results.channel; 29 | end 30 | ReqChannel = unique(ReqChannel); 31 | 32 | %% Filter out datasources missing required channels 33 | if ~isempty(p.Results.channel) 34 | %Find datasources with the required channels 35 | hasRequired = dm.getIndex('channel', ReqChannel); 36 | 37 | %Get the index of the current datasources 38 | currentIndex = [ds.Index]; 39 | 40 | %Filter out datasource missing all required variables 41 | [~, dropIndex] = setxor(currentIndex, hasRequired); 42 | ds(dropIndex) = []; 43 | 44 | %If no datasource remain throw error 45 | if isempty(ds) 46 | error('No datasource had all required channels'); 47 | end 48 | end 49 | 50 | %Initialize array for holding results 51 | MapFunOut = cell(length(ds), abs(nargout(mapFun))); 52 | 53 | %% Loop over each datasource 54 | 
nDatasource = length(ds); 55 | textprogressbar('Processing Datasources: ', 'new'); 56 | for i = 1:nDatasource 57 | %Attempt to load required channels 58 | if ~isempty(p.Results.channel) 59 | ds(i).loadChannel(p.Results.channel); 60 | end 61 | %Apply mapFunction to each Datasource 62 | [MapFunOut{i,:}] = mapFun(ds(i)); 63 | 64 | %Make sure all data is unloaded 65 | ds(i).clearData; 66 | 67 | %Update progress bar 68 | textprogressbar(100*i/nDatasource); 69 | end 70 | textprogressbar('done'); 71 | 72 | %Expand MapFunOut to separate each output of mapFun 73 | results = cell(size(MapFunOut,2),1); 74 | for i = 1:size(MapFunOut,2) 75 | %Keep results as a cell array 76 | results{i} = MapFunOut(:,i); 77 | end 78 | 79 | %% Pass the consolidated outputs of mapFun to reduceFun 80 | 81 | %Initialize varargout to capture all the outputs of reduceFun 82 | varargout = cell(1,nargout(reduceFun)); 83 | 84 | %Run reduceFun 85 | [varargout{:}] = reduceFun(results{:}); -------------------------------------------------------------------------------- /@datasource/openInMoTeC.m: -------------------------------------------------------------------------------- 1 | function openInMoTeC(ds) 2 | 3 | %Set Google Drive Location 4 | pathGoogleDrive = 'C:\Users\Alex\'; 5 | 6 | %Check a datasource was provided 7 | if ~isempty(ds) 8 | %Start MoTeC COM Server 9 | i2 = actxserver('MoTeC.i2Application'); 10 | i2.Visible = 1; 11 | pause(1); %Wait for MoTeC to Open 12 | 13 | for i = 1:length(ds) 14 | %Open Datasource in MoTeC 15 | path = fullfile(pathGoogleDrive,ds(i).Entry.Origin); 16 | i2.DataSources.Open(path); 17 | end 18 | 19 | end -------------------------------------------------------------------------------- /@sqlite/sqlite.m: -------------------------------------------------------------------------------- 1 | classdef sqlite 2 | %Class for connecting to an sqlite database 3 | 4 | properties 5 | conn = []; 6 | end 7 | 8 | methods 9 | function obj = sqlite(dbpath) 10 | import py.sqlite3.* 11 | 12 
| obj.conn = py.sqlite3.connect(dbpath); 13 | end 14 | 15 | function execute(obj, SQLQuery, varargin) 16 | try 17 | if numel(varargin) > 1 18 | obj.conn.execute(SQLQuery, varargin); 19 | else 20 | obj.conn.execute(SQLQuery, py.list(varargin{:})); 21 | end 22 | catch pyE 23 | e.message = sprintf('SQL Query Failed: %s\n%s',... 24 | SQLQuery, pyE.message); 25 | e.identifier = 'Datamaster:sqlite:execute'; 26 | error(e); 27 | end 28 | end 29 | 30 | function executemany(obj, SQLQuery, varargin) 31 | obj.conn.executemany(SQLQuery,varargin) 32 | end 33 | 34 | function record = fetch(obj, SQLQuery) 35 | %Run and fetch the query 36 | cur = obj.conn.execute(SQLQuery); 37 | results = cur.fetchall(); 38 | 39 | %Convert py.list to cell 40 | results = cell(results); 41 | if ~isempty(results) 42 | record = cell(length(results),length(results{1})); 43 | for i = 1:length(results) 44 | record(i,:) = cell(results{i}); 45 | end 46 | 47 | %Convert python datatypes to matlab 48 | record = cellfun(@py2Mat,record, 'UniformOutput',0); 49 | else 50 | record = []; 51 | end 52 | end 53 | end 54 | end 55 | 56 | function matData = py2Mat(pyData) 57 | switch class(pyData) 58 | case 'py.str' 59 | %Convert py.str to char 60 | matData = char(pyData); 61 | case 'py.unicode' 62 | %Convert py.unicode to char 63 | matData = char(pyData); 64 | case 'py.int' 65 | %Convert py.int to double 66 | matData = double(pyData); 67 | case 'py.NoneType' 68 | matData = []; 69 | otherwise 70 | %Leave as is 71 | matData = pyData; 72 | end 73 | 74 | end 75 | -------------------------------------------------------------------------------- /DataHash.m: -------------------------------------------------------------------------------- 1 | function Hash = DataHash(Data, Opt) 2 | % DATAHASH - Checksum for Matlab array of any type 3 | % This function creates a hash value for an input of any type. 
The type and 4 | % dimensions of the input are considered as default, such that UINT8([0,0]) and 5 | % UINT16(0) have different hash values. Nested STRUCTs and CELLs are parsed 6 | % recursively. 7 | % 8 | % Hash = DataHash(Data, Opt) 9 | % INPUT: 10 | % Data: Array of these built-in types: 11 | % (U)INT8/16/32/64, SINGLE, DOUBLE, (real/complex, full/sparse) 12 | % CHAR, LOGICAL, CELL (nested), STRUCT (scalar or array, nested), 13 | % function_handle. 14 | % Opt: Struct to specify the hashing algorithm and the output format. 15 | % Opt and all its fields are optional. 16 | % Opt.Method: String, known methods for Java 1.6 (Matlab 2011b): 17 | % 'SHA-1', 'SHA-256', 'SHA-384', 'SHA-512', 'MD2', 'MD5'. 18 | % Call DataHash without inputs to get a list of available methods. 19 | % Default: 'MD5'. 20 | % Opt.Format: String specifying the output format: 21 | % 'hex', 'HEX': Lower/uppercase hexadecimal string. 22 | % 'double', 'uint8': Numerical vector. 23 | % 'base64': Base64 encoded string, only printable ASCII 24 | % characters, shorter than 'hex', no padding. 25 | % Default: 'hex'. 26 | % Opt.Input: Type of the input as string, not case-sensitive: 27 | % 'array': The contents, type and size of the input [Data] are 28 | % considered for the creation of the hash. Nested CELLs 29 | % and STRUCT arrays are parsed recursively. Empty arrays of 30 | % different type reply different hashs. 31 | % 'file': [Data] is treated as file name and the hash is calculated 32 | % for the files contents. 33 | % 'bin': [Data] is a numerical, LOGICAL or CHAR array. Only the 34 | % binary contents of the array is considered, such that 35 | % e.g. empty arrays of different type reply the same hash. 36 | % 'ascii': Same as 'bin', but only the 8-bit ASCII part of the 16-bit 37 | % Matlab CHARs is considered. 38 | % Default: 'array'. 39 | % 40 | % OUTPUT: 41 | % Hash: String, DOUBLE or UINT8 vector. The length depends on the hashing 42 | % method. 
43 | % 44 | % EXAMPLES: 45 | % % Default: MD5, hex: 46 | % DataHash([]) % 5b302b7b2099a97ba2a276640a192485 47 | % % MD5, Base64: 48 | % Opt = struct('Format', 'base64', 'Method', 'MD5'); 49 | % DataHash(int32(1:10), Opt) % +tJN9yeF89h3jOFNN55XLg 50 | % % SHA-1, Base64: 51 | % S.a = uint8([]); 52 | % S.b = {{1:10}, struct('q', uint64(415))}; 53 | % Opt.Method = 'SHA-1'; 54 | % Opt.Format = 'HEX'; 55 | % DataHash(S, Opt) % 18672BE876463B25214CA9241B3C79CC926F3093 56 | % % SHA-1 of binary values: 57 | % Opt = struct('Method', 'SHA-1', 'Input', 'bin'); 58 | % DataHash(1:8, Opt) % 826cf9d3a5d74bbe415e97d4cecf03f445f69225 59 | % % SHA-256, consider ASCII part only (Matlab's CHAR has 16 bits!): 60 | % Opt.Method = 'SHA-256'; 61 | % Opt.Input = 'ascii'; 62 | % DataHash('abc', Opt) 63 | % % ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad 64 | % % Or equivalently: 65 | % Opt.Input = 'bin'; 66 | % DataHash(uint8('abc'), Opt) 67 | % 68 | % NOTES: 69 | % Function handles and user-defined objects cannot be converted uniquely: 70 | % - The subfunction ConvertFuncHandle uses the built-in function FUNCTIONS, 71 | % but the replied struct can depend on the Matlab version. 72 | % - It is tried to convert objects to UINT8 streams in the subfunction 73 | % ConvertObject. A conversion by STRUCT() might be more appropriate. 74 | % Adjust these subfunctions on demand. 75 | % 76 | % MATLAB CHARs have 16 bits! Use Opt.Input='ascii' for comparisons with e.g. 77 | % online hash generators. 78 | % 79 | % Matt Raum suggested this for e.g. user-defined objects: 80 | % DataHash(getByteStreamFromArray(Data) 81 | % This works very well, but unfortunately getByteStreamFromArray is 82 | % undocumented, such that it might vanish in the future or reply different 83 | % output. 84 | % 85 | % For arrays the calculated hash value might be changed in new versions. 86 | % Calling this function without inputs replies the version of the hash. 
87 | % 88 | % The C-Mex function GetMD5 is 2 to 100 times faster, but obtains MD5 only: 89 | % http://www.mathworks.com/matlabcentral/fileexchange/25921 90 | % 91 | % Tested: Matlab 7.7, 7.8, 7.13, 8.6, WinXP/32, Win7/64 92 | % Author: Jan Simon, Heidelberg, (C) 2011-2016 matlab.2010(a)n(MINUS)simon.de 93 | % 94 | % See also: TYPECAST, CAST. 95 | % 96 | % Michael Kleder, "Compute Hash", no structs and cells: 97 | % http://www.mathworks.com/matlabcentral/fileexchange/8944 98 | % Tim, "Serialize/Deserialize", converts structs and cells to a byte stream: 99 | % http://www.mathworks.com/matlabcentral/fileexchange/29457 100 | 101 | % $JRev: R-H V:033 Sum:R+m7rAPNLvlw Date:18-Jun-2016 14:33:17 $ 102 | % $License: BSD (use/copy/change/redistribute on own risk, mention the author) $ 103 | % $File: Tools\GLFile\DataHash.m $ 104 | % History: 105 | % 001: 01-May-2011 21:52, First version. 106 | % 007: 10-Jun-2011 10:38, [Opt.Input], binary data, complex values considered. 107 | % 011: 26-May-2012 15:57, Fixed: Failed for binary input and empty data. 108 | % 014: 04-Nov-2012 11:37, Consider Mex-, MDL- and P-files also. 109 | % Thanks to David (author 243360), who found this bug. 110 | % Jan Achterhold (author 267816) suggested to consider Java objects. 111 | % 016: 01-Feb-2015 20:53, Java heap space exhausted for large files. 112 | % Now files are process in chunks to save memory. 113 | % 017: 15-Feb-2015 19:40, Collsions: Same hash for different data. 114 | % Examples: zeros(1,1) and zeros(1,1,0) 115 | % complex(0) and zeros(1,1,0,0) 116 | % Now the number of dimensions is included, to avoid this. 117 | % 022: 30-Mar-2015 00:04, Bugfix: Failed for strings and [] without TYPECASTX. 118 | % Ross found these 2 bugs, which occur when TYPECASTX is not installed. 119 | % If you need the base64 format padded with '=' characters, adjust 120 | % fBase64_enc as you like. 121 | % 026: 29-Jun-2015 00:13, Changed hash for STRUCTs. 
122 | % Struct arrays are analysed field by field now, which is much faster. 123 | % 027: 13-Sep-2015 19:03, 'ascii' input as abbrev. for Input='bin' and UINT8(). 124 | % 028: 15-Oct-2015 23:11, Example values in help section updated to v022. 125 | % 029: 16-Oct-2015 22:32, Use default options for empty input. 126 | % 031: 28-Feb-2016 15:10, New hash value to get same reply as GetMD5. 127 | % New Matlab version (at least 2015b) use a fast method for TYPECAST, such 128 | % that calling James Tursa's TYPECASTX is not needed anymore. 129 | % Matlab 6.5 not supported anymore: MException for CATCH. 130 | % 033: 18-Jun-2016 14:28, BUGFIX: Failed on empty files. 131 | % Thanks to Christian (AuthorID 2918599). 132 | 133 | % OPEN BUGS: 134 | % Nath wrote: 135 | % function handle refering to struct containing the function will create 136 | % infinite loop. Is there any workaround ? 137 | % Example: 138 | % d= dynamicprops(); 139 | % addprop(d,'f'); 140 | % d.f= @(varargin) struct2cell(d); 141 | % DataHash(d.f) % infinite loop 142 | % This is caught with an error message concerning the recursion limit now. 
143 | 144 | % Main function: =============================================================== 145 | % Default options: ------------------------------------------------------------- 146 | Method = 'MD5'; 147 | OutFormat = 'hex'; 148 | isFile = false; 149 | isBin = false; 150 | 151 | % Check number and type of inputs: --------------------------------------------- 152 | nArg = nargin; 153 | if nArg == 2 154 | if isa(Opt, 'struct') == 0 % Bad type of 2nd input: 155 | Error_L('BadInput2', '2nd input [Opt] must be a struct.'); 156 | end 157 | 158 | % Specify hash algorithm: 159 | if isfield(Opt, 'Method') && ~isempty(Opt.Method) % Short-circuiting 160 | Method = upper(Opt.Method); 161 | end 162 | 163 | % Specify output format: 164 | if isfield(Opt, 'Format') && ~isempty(Opt.Format) % Short-circuiting 165 | OutFormat = Opt.Format; 166 | end 167 | 168 | % Check if the Input type is specified - default: 'array': 169 | if isfield(Opt, 'Input') && ~isempty(Opt.Input) % Short-circuiting 170 | if strcmpi(Opt.Input, 'File') 171 | if ischar(Data) == 0 172 | Error_L('CannotOpen', '1st input FileName must be a string'); 173 | end 174 | isFile = true; 175 | 176 | elseif strncmpi(Opt.Input, 'bin', 3) % Accept 'binary' also 177 | if (isnumeric(Data) || ischar(Data) || islogical(Data)) == 0 || ... 178 | issparse(Data) 179 | Error_L('BadDataType', ... 180 | '1st input must be numeric, CHAR or LOGICAL for binary input.'); 181 | end 182 | isBin = true; 183 | 184 | elseif strncmpi(Opt.Input, 'asc', 3) % 8-bit ASCII characters 185 | if ~ischar(Data) 186 | Error_L('BadDataType', ... 
187 | '1st input must be a CHAR for the input type ASCII.'); 188 | end 189 | isBin = true; 190 | Data = uint8(Data); 191 | end 192 | end 193 | 194 | elseif nArg == 0 % Reply version of this function: 195 | R = Version_L; 196 | 197 | if nargout == 0 198 | disp(R); 199 | else 200 | Hash = R; 201 | end 202 | 203 | return; 204 | 205 | elseif nArg ~= 1 % Bad number of arguments: 206 | Error_L('BadNInput', '1 or 2 inputs required.'); 207 | end 208 | 209 | % Create the engine: ----------------------------------------------------------- 210 | try 211 | Engine = java.security.MessageDigest.getInstance(Method); 212 | catch 213 | Error_L('BadInput2', 'Invalid algorithm: [%s].', Method); 214 | end 215 | 216 | % Create the hash value: ------------------------------------------------------- 217 | if isFile 218 | % Open the file: 219 | FID = fopen(Data, 'r'); 220 | if FID < 0 221 | % Check existence of file: 222 | Found = FileExist_L(Data); 223 | if Found 224 | Error_L('CantOpenFile', 'Cannot open file: %s.', Data); 225 | else 226 | Error_L('FileNotFound', 'File not found: %s.', Data); 227 | end 228 | end 229 | 230 | % Read file in chunks to save memory and Java heap space: 231 | Chunk = 1e6; % Fastest for 1e6 on Win7/64, HDD 232 | Count = Chunk; % Dummy value to satisfy WHILE condition 233 | while Count == Chunk 234 | [Data, Count] = fread(FID, Chunk, '*uint8'); 235 | if Count ~= 0 % Avoid error for empty file 236 | Engine.update(Data); 237 | end 238 | end 239 | fclose(FID); 240 | 241 | % Calculate the hash: 242 | Hash = typecast(Engine.digest, 'uint8'); 243 | 244 | elseif isBin % Contents of an elementary array, type tested already: 245 | if isempty(Data) % Nothing to do, Engine.update fails for empty input! 
246 | Hash = typecast(Engine.digest, 'uint8'); 247 | else % Matlab's TYPECAST is less elegant: 248 | if isnumeric(Data) 249 | if isreal(Data) 250 | Engine.update(typecast(Data(:), 'uint8')); 251 | else 252 | Engine.update(typecast(real(Data(:)), 'uint8')); 253 | Engine.update(typecast(imag(Data(:)), 'uint8')); 254 | end 255 | elseif islogical(Data) % TYPECAST cannot handle LOGICAL 256 | Engine.update(typecast(uint8(Data(:)), 'uint8')); 257 | elseif ischar(Data) % TYPECAST cannot handle CHAR 258 | Engine.update(typecast(uint16(Data(:)), 'uint8')); 259 | % Bugfix: Line removed 260 | end 261 | Hash = typecast(Engine.digest, 'uint8'); 262 | end 263 | else % Array with type: 264 | Engine = CoreHash(Data, Engine); 265 | Hash = typecast(Engine.digest, 'uint8'); 266 | end 267 | 268 | % Convert hash specific output format: ----------------------------------------- 269 | switch OutFormat 270 | case 'hex' 271 | Hash = sprintf('%.2x', double(Hash)); 272 | case 'HEX' 273 | Hash = sprintf('%.2X', double(Hash)); 274 | case 'double' 275 | Hash = double(reshape(Hash, 1, [])); 276 | case 'uint8' 277 | Hash = reshape(Hash, 1, []); 278 | case 'base64' 279 | Hash = fBase64_enc(double(Hash)); 280 | otherwise 281 | Error_L('BadOutFormat', ... 282 | '[Opt.Format] must be: HEX, hex, uint8, double, base64.'); 283 | end 284 | 285 | % return; 286 | 287 | % ****************************************************************************** 288 | function Engine = CoreHash(Data, Engine) 289 | % This methods uses the slower TYPECAST of Matlab 290 | 291 | % Consider the type and dimensions of the array to distinguish arrays with the 292 | % same data, but different shape: [0 x 0] and [0 x 1], [1,2] and [1;2], 293 | % DOUBLE(0) and SINGLE([0,0]): 294 | % < v016: [class, size, data]. BUG! 0 and zeros(1,1,0) had the same hash! 295 | % >= v016: [class, ndims, size, data] 296 | Engine.update([uint8(class(Data)), ... 
297 | typecast(uint64([ndims(Data), size(Data)]), 'uint8')]); 298 | 299 | if issparse(Data) % Sparse arrays to struct: 300 | [S.Index1, S.Index2, S.Value] = find(Data); 301 | Engine = CoreHash(S, Engine); 302 | elseif isstruct(Data) % Hash for all array elements and fields: 303 | F = sort(fieldnames(Data)); % Ignore order of fields 304 | for iField = 1:length(F) % Loop over fields 305 | aField = F{iField}; 306 | Engine.update(uint8(aField)); 307 | for iS = 1:numel(Data) % Loop over elements of struct array 308 | Engine = CoreHash(Data(iS).(aField), Engine); 309 | end 310 | end 311 | elseif iscell(Data) % Get hash for all cell elements: 312 | for iS = 1:numel(Data) 313 | Engine = CoreHash(Data{iS}, Engine); 314 | end 315 | elseif isempty(Data) % Nothing to do 316 | elseif isnumeric(Data) 317 | if isreal(Data) 318 | Engine.update(typecast(Data(:), 'uint8')); 319 | else 320 | Engine.update(typecast(real(Data(:)), 'uint8')); 321 | Engine.update(typecast(imag(Data(:)), 'uint8')); 322 | end 323 | elseif islogical(Data) % TYPECAST cannot handle LOGICAL 324 | Engine.update(typecast(uint8(Data(:)), 'uint8')); 325 | elseif ischar(Data) % TYPECAST cannot handle CHAR 326 | Engine.update(typecast(uint16(Data(:)), 'uint8')); 327 | elseif isa(Data, 'function_handle') 328 | Engine = CoreHash(ConvertFuncHandle(Data), Engine); 329 | elseif (isobject(Data) || isjava(Data)) && ismethod(Data, 'hashCode') 330 | Engine = CoreHash(char(Data.hashCode), Engine); 331 | else % Most likely a user-defined object: 332 | try 333 | BasicData = ConvertObject(Data); 334 | catch ME 335 | error(['JSimon:', mfilename, ':BadDataType'], ... 336 | '%s: Cannot create elementary array for type: %s\n %s', ... 337 | mfilename, class(Data), ME.message); 338 | end 339 | 340 | try 341 | Engine = CoreHash(BasicData, Engine); 342 | catch ME 343 | if strcmpi(ME.identifier, 'MATLAB:recursionLimit') 344 | ME = MException(['JSimon:', mfilename, ':RecursiveType'], ... 
345 | '%s: Cannot create hash for recursive data type: %s', ... 346 | mfilename, class(Data)); 347 | end 348 | throw(ME); 349 | end 350 | end 351 | 352 | % return; 353 | 354 | % ****************************************************************************** 355 | function FuncKey = ConvertFuncHandle(FuncH) 356 | % The subfunction ConvertFuncHandle converts function_handles to a struct 357 | % using the Matlab function FUNCTIONS. The output of this function changes 358 | % with the Matlab version, such that DataHash(@sin) replies different hashes 359 | % under Matlab 6.5 and 2009a. 360 | % An alternative is using the function name and name of the file for 361 | % function_handles, but this is not unique for nested or anonymous functions. 362 | % If the MATLABROOT is removed from the file's path, at least the hash of 363 | % Matlab's toolbox functions is (usually!) not influenced by the version. 364 | % Finally I'm in doubt if there is a unique method to hash function handles. 365 | % Please adjust the subfunction ConvertFuncHandles to your needs. 366 | 367 | % The Matlab version influences the conversion by FUNCTIONS: 368 | % 1. The format of the struct replied FUNCTIONS is not fixed, 369 | % 2. The full paths of toolbox function e.g. for @mean differ. 370 | FuncKey = functions(FuncH); 371 | 372 | % Include modification file time and file size. Suggested by Aslak Grinsted: 373 | if ~isempty(FuncKey.file) 374 | d = dir(FuncKey.file); 375 | if ~isempty(d) 376 | FuncKey.filebytes = d.bytes; 377 | FuncKey.filedate = d.datenum; 378 | end 379 | end 380 | 381 | % ALTERNATIVE: Use name and path. The part of the toolbox functions 382 | % is replaced such that the hash for @mean does not depend on the Matlab 383 | % version. 384 | % Drawbacks: Anonymous functions, nested functions... 
385 | % funcStruct = functions(FuncH); 386 | % funcfile = strrep(funcStruct.file, matlabroot, ''); 387 | % FuncKey = uint8([funcStruct.function, ' ', funcfile]); 388 | 389 | % Finally I'm afraid there is no unique method to get a hash for a function 390 | % handle. Please adjust this conversion to your needs. 391 | 392 | % return; 393 | 394 | % ****************************************************************************** 395 | function DataBin = ConvertObject(DataObj) 396 | % Convert a user-defined object to a binary stream. There cannot be a unique 397 | % solution, so this part is left for the user... 398 | 399 | try % Perhaps a direct conversion is implemented: 400 | DataBin = uint8(DataObj); 401 | 402 | % Matt Raum had this excellent idea - unfortunately this function is 403 | % undocumented and might not be supported in the future: 404 | % DataBin = getByteStreamFromArray(DataObj); 405 | 406 | catch % Or perhaps this is better: 407 | WarnS = warning('off', 'MATLAB:structOnObject'); 408 | DataBin = struct(DataObj); 409 | warning(WarnS); 410 | end 411 | 412 | % return; 413 | 414 | % ****************************************************************************** 415 | function Out = fBase64_enc(In) 416 | % Encode numeric vector of UINT8 values to base64 string. 417 | % The intention of this is to create a shorter hash than the HEX format. 418 | % Therefore a padding with '=' characters is omitted on purpose.
419 | 420 | Pool = [65:90, 97:122, 48:57, 43, 47]; % [A:Z, a:z, 0:9, +, /] 421 | v8 = [128; 64; 32; 16; 8; 4; 2; 1]; 422 | v6 = [32, 16, 8, 4, 2, 1]; 423 | 424 | In = reshape(In, 1, []); 425 | X = rem(floor(In(ones(8, 1), :) ./ v8(:, ones(length(In), 1))), 2); 426 | Y = reshape([X(:); zeros(6 - rem(numel(X), 6), 1)], 6, []); 427 | Out = char(Pool(1 + v6 * Y)); 428 | 429 | % return; 430 | 431 | % ****************************************************************************** 432 | function Ex = FileExist_L(FileName) 433 | % A more reliable version of EXIST(FileName, 'file'): 434 | dirFile = dir(FileName); 435 | if length(dirFile) == 1 436 | Ex = ~(dirFile.isdir); 437 | else 438 | Ex = false; 439 | end 440 | 441 | % return; 442 | 443 | % ****************************************************************************** 444 | function R = Version_L() 445 | % The output differs between versions of this function. So give the user a 446 | % chance to recognize the version: 447 | % 1: 01-May-2011, Initial version 448 | % 2: 15-Feb-2015, The number of dimensions is considered in addition. 449 | % In version 1 these variables had the same hash: 450 | % zeros(1,1) and zeros(1,1,0), complex(0) and zeros(1,1,0,0) 451 | % 3: 29-Jun-2015, Struct arrays are processed field by field and not element 452 | % by element, because this is much faster. In consequence the hash value 453 | % differs, if the input contains a struct. 454 | % 4: 28-Feb-2016 15:20, same output as GetMD5 for MD5 sums. Therefore the 455 | % dimensions are cast to UINT64 at first.
456 | R.HashVersion = 4; 457 | R.Date = [2016, 2, 28]; 458 | 459 | R.HashMethod = {}; 460 | try 461 | Provider = java.security.Security.getProviders; 462 | for iProvider = 1:numel(Provider) 463 | S = char(Provider(iProvider).getServices); 464 | Index = strfind(S, 'MessageDigest.'); 465 | for iDigest = 1:length(Index) 466 | Digest = strtok(S(Index(iDigest):end)); 467 | Digest = strrep(Digest, 'MessageDigest.', ''); 468 | R.HashMethod = cat(2, R.HashMethod, {Digest}); 469 | end 470 | end 471 | catch ME 472 | fprintf(2, '%s\n', ME.message); 473 | R.HashMethod = 'error'; 474 | end 475 | 476 | % return; 477 | 478 | % ****************************************************************************** 479 | function Error_L(ID, varargin) 480 | 481 | error(['JSimon:', mfilename, ':', ID], ['*** %s: ', varargin{1}], ... 482 | mfilename, varargin{2:nargin - 1}); 483 | 484 | % return; 485 | -------------------------------------------------------------------------------- /DatamasterSetup.mlapp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awadell1/Datamaster/1cd51abd47c3178bb925c7b01014ee36f6b5f821/DatamasterSetup.mlapp -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Alexius Wadell 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial 
portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Legal/DataHash_license.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016, Jan Simon 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are 6 | met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | * Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in 12 | the documentation and/or other materials provided with the distribution 13 | 14 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 15 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 16 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 17 | ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE 18 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 19 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 20 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 21 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 22 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 23 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 24 | POSSIBILITY OF SUCH DAMAGE. 25 | -------------------------------------------------------------------------------- /Legal/NumPy_license.txt: -------------------------------------------------------------------------------- 1 | NumPy license 2 | 3 | Copyright © 2005-2016, NumPy Developers. 4 | All rights reserved. 5 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 6 | 7 | Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 8 | Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 9 | Neither the name of the NumPy Developers nor the names of any contributors may be used to endorse or promote products derived from this software without specific prior written permission. 10 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 11 | 12 | Source: http://www.numpy.org/license.html -------------------------------------------------------------------------------- /Legal/SQLite_license.txt: -------------------------------------------------------------------------------- 1 | SQLite Is Public Domain 2 | 3 | 4 | SQLite is in the 5 | Public Domain 6 | All of the code and documentation in SQLite has been dedicated to the public domain by the authors. All code authors, and representatives of the companies they work for, have signed affidavits dedicating their contributions to the public domain and originals of those signed affidavits are stored in a firesafe at the main offices of Hwaci. Anyone is free to copy, modify, publish, use, compile, sell, or distribute the original SQLite code, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means. 7 | 8 | The previous paragraph applies to the deliverable code and documentation in SQLite - those parts of the SQLite library that you actually bundle and ship with a larger application. Some scripts used as part of the build process (for example the "configure" scripts generated by autoconf) might fall under other open-source licenses. Nothing from these build scripts ever reaches the final deliverable SQLite library, however, and so the licenses associated with those scripts should not be a factor in assessing your rights to copy and use the SQLite library. 
9 | 10 | All of the deliverable code in SQLite has been written from scratch. No code has been taken from other projects or from the open internet. Every line of code can be traced back to its original author, and all of those authors have public domain dedications on file. So the SQLite code base is clean and is uncontaminated with licensed code from other projects. 11 | 12 | Buy An SQLite License 13 | Obtaining An License To Use SQLite 14 | 15 | Even though SQLite is in the public domain and does not require a license, some users want to obtain a license anyway. Some reasons for obtaining a license include: 16 | 17 | Your company desires warranty of title and/or indemnity against claims of copyright infringement. 18 | You are using SQLite in a jurisdiction that does not recognize the public domain. 19 | You are using SQLite in a jurisdiction that does not recognize the right of an author to dedicate their work to the public domain. 20 | You want to hold a tangible legal document as evidence that you have the legal right to use and distribute SQLite. 21 | Your legal department tells you that you have to purchase a license. 22 | If you feel like you really need to purchase a license for SQLite, Hwaci, the company that employs all the developers of SQLite, will sell you one. All proceeds from the sale of SQLite licenses are used to fund continuing improvement and support of SQLite. 23 | 24 | Contributed Code 25 | 26 | In order to keep SQLite completely free and unencumbered by copyright, all new contributors to the SQLite code base are asked to dedicate their contributions to the public domain. If you want to send a patch or enhancement for possible inclusion in the SQLite source tree, please accompany the patch with the following statement: 27 | 28 | The author or authors of this code dedicate any and all copyright interest in this code to the public domain. 
We make this dedication for the benefit of the public at large and to the detriment of our heirs and successors. We intend this dedication to be an overt act of relinquishment in perpetuity of all present and future rights to this code under copyright law. 29 | We are not able to accept patches or changes to SQLite that are not accompanied by a statement such as the above. In addition, if you make changes or enhancements as an employee, then a simple statement such as the above is insufficient. You must also send by surface mail a copyright release signed by a company officer. A signed original of the copyright release should be mailed to: 30 | 31 | Hwaci 32 | 6200 Maple Cove Lane 33 | Charlotte, NC 28269 34 | USA 35 | A template copyright release is available in PDF or HTML. You can use this release to make future changes. -------------------------------------------------------------------------------- /Legal/pint_license.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2012 by Hernan E. Grecco and contributors. See AUTHORS 2 | for more details. 3 | 4 | Some rights reserved. 5 | 6 | Redistribution and use in source and binary forms of the software as well 7 | as documentation, with or without modification, are permitted provided 8 | that the following conditions are met: 9 | 10 | * Redistributions of source code must retain the above copyright 11 | notice, this list of conditions and the following disclaimer. 12 | 13 | * Redistributions in binary form must reproduce the above 14 | copyright notice, this list of conditions and the following 15 | disclaimer in the documentation and/or other materials provided 16 | with the distribution. 17 | 18 | * The names of the contributors may not be used to endorse or 19 | promote products derived from this software without specific 20 | prior written permission. 
21 | 22 | THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND 23 | CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT 24 | NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 25 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER 26 | OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 27 | EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 28 | PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 29 | PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 30 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 31 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 32 | SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH 33 | DAMAGE. 34 | 35 | 36 | Public Git Repo Located at: https://github.com/hgrecco/pint -------------------------------------------------------------------------------- /Legal/textprogressbar_license.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2010, Paul Proteus 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are 6 | met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | * Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in 12 | the documentation and/or other materials provided with the distribution 13 | 14 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 15 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 16 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 17 | ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE 18 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 19 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 20 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 21 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 22 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 23 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 24 | POSSIBILITY OF SUCH DAMAGE. 25 | -------------------------------------------------------------------------------- /Python/ConnectGoogleDrive.py: -------------------------------------------------------------------------------- 1 | # ConnectGoogleDrive.py 2 | from oauth2client.client import OAuth2WebServerFlow 3 | from oauth2client import tools 4 | from oauth2client.contrib.keyring_storage import Storage 5 | from apiclient.discovery import build 6 | import httplib2  # NOTE(review): oauth2client is deprecated upstream -- confirm migration plan to google-auth 7 | 8 | def getAuthToken(client_id, client_secret): 9 | # Load any previously stored credential from the system keyring 10 | storage = Storage('Datamaster', 'user1') 11 | credential = storage.get() 12 | 13 | # Check if the stored credential is still valid 14 | if (credential is None or credential.invalid): 15 | # Credentials expired -> Get new one 16 | 17 | # Create Flow object to handle OAuth 2.0 18 | flow = OAuth2WebServerFlow(client_id=client_id, 19 | client_secret=client_secret, 20 | scope='https://www.googleapis.com/auth/drive', 21 | redirect_uri='urn:ietf:wg:oauth:2.0:oob') 22 | 23 | # Get and store new credential 24 | credential = tools.run_flow(flow, storage) 25 | return credential 26 | 27 | 28 | def getFileList(client_id, client_secret): 29 | # Authenticate with Google Drive 30 | credential = getAuthToken(client_id, client_secret) 31 | http = credential.authorize(httplib2.Http())  # wrap the HTTP transport with the OAuth credential 32 | drive = build('drive', 'v3', http=http)  # Drive v3 service client 33 | 34 | pageToken = None 35 | file = [] 36 | while True: 37 | # Grab a page of files from the
server 38 | response = drive.files().list(q='fileExtension = \'ld\' or fileExtension = \'ldx\'', 39 | fields='nextPageToken, files(id, name, md5Checksum, modifiedTime, webContentLink)', 40 | pageSize=1000, 41 | orderBy='modifiedTime', 42 | pageToken=pageToken).execute() 43 | 44 | # Get the token for the next page 45 | pageToken = response.get('nextPageToken') 46 | 47 | # Append to the list of files 48 | file = file + response.get('files', []) 49 | 50 | # Break if no more pages 51 | if pageToken is None: 52 | break 53 | return file 54 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## What is Datamaster ## 2 | Datamaster is a set of MATLAB objects that enable multi-MoTeC Log file analysis, developed for Cornell Racing to enable designers to validate load cases or quantify system by quickly analyzing thousands of log files. 3 | 4 | * Quickly filter datasources to examine only relevant data 5 | * Leverage the power of MATLAB for advanced signal filtering, plotting and more 6 | * Baked in analysis tools for analyzing car performance (ie. gg-circles, torque curves, etc) 7 | 8 | ## Getting Started with Datamaster ## 9 | Assuming MATLAB is already installed, download and run this setup GUI: [here](https://github.com/awadell1/Datamaster/raw/master/DatamasterSetup.mlapp) 10 | 11 | If you have not already installed MATLAB, do that now. 12 | 13 | You can check that everything was installed correctly by running the following command in MATLAB: 14 | ```matlab 15 | close all 16 | dm = Datamaster; ds = dm.getDatasource; 17 | ds(1:100).Histogram2('Engine_RPM', 'Manifold_Pres',[0, 10000; 70 170], 'unit', {'rpm', 'kPa'}); 18 | ``` 19 | 20 | Once installed check out the [wiki](https://github.com/awadell1/Datamaster/wiki/Welcome-to-the-Datamaster-wiki!) for documentation, examples and troubleshooting guides.
21 | 22 | ## For Mac and Linux Users ## 23 | Datamaster is built using cross-platform tools (MATLAB and Python) and has been tested using Linux. However, at present Datamaster is developed solely on a PC and thus other platforms, while not unsupported, are largely untested. If you do run into any bug/missing feature for your platform, please submit a bug/feature request. However, given my personal lack of access to non-PC platforms, any less than obvious fixes may take time. 24 | 25 | ## Bug Reporting ## 26 | Datamaster is still very much in its infancy, and as such bugs are to be expected. If you do by chance happen to find a bug: 27 | 28 | 1. Submit a Bug Report [here](https://github.com/awadell1/Datamaster/issues/new) 29 | 2. Try to fix the bug yourself if at all possible 30 | 3. If you do manage to fix the bug, please submit a pull request [here](https://github.com/awadell1/Datamaster/compare) 31 | 32 | ## Feature Request ## 33 | Datamaster is a new tool and likely is missing some of the features that you might want. If there's a feature that you'd like to see in a future release: 34 | 35 | 1. Submit a feature request [here](https://github.com/awadell1/Datamaster/issues/new) 36 | 2. Try to implement the feature yourself 37 | 3. 
If you do manage to implement the feature, please submit a pull request [here](https://github.com/awadell1/Datamaster/compare) 38 | -------------------------------------------------------------------------------- /SQLQueries/Channels.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE ChannelLog( 2 | id INTEGER PRIMARY KEY, 3 | entryId INTEGER NOT NULL, 4 | channelId INTEGER NOT NULL, 5 | FOREIGN KEY(entryId) REFERENCES MasterDirectory(id), -- BUGFIX: SQLite requires commas between table constraints 6 | FOREIGN KEY(channelId) REFERENCES ChannelName(id) 7 | ); 8 | 9 | CREATE TABLE ChannelName( 10 | id INTEGER PRIMARY KEY, 11 | channelName TEXT NOT NULL 12 | ) -------------------------------------------------------------------------------- /SQLQueries/DetailLog.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE DetailLog( 2 | id INTEGER PRIMARY KEY, 3 | entryId INTEGER NOT NULL, 4 | fieldId INTEGER NOT NULL, 5 | value NUMERIC NOT NULL, -- NOTE(review): MasterDirectory.sql recreates this column as BLOB; confirm which type is canonical 6 | unit TEXT, 7 | FOREIGN KEY(fieldId) REFERENCES DetailName(id), -- BUGFIX: SQLite requires commas between table constraints 8 | FOREIGN KEY(entryId) REFERENCES MasterDirectory(id) 9 | ) -------------------------------------------------------------------------------- /SQLQueries/DetailName.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE DetailName( 2 | id INTEGER PRIMARY KEY, 3 | fieldName TEXT NOT NULL 4 | ) -------------------------------------------------------------------------------- /SQLQueries/MasterDirectory.sql: -------------------------------------------------------------------------------- 1 | /* 2 | SQLite Statements to recreate the database used by Datamaster 3 | */ 4 | 5 | --Delete Database 6 | DROP TABLE `masterDirectory`; 7 | DROP TABLE `DetailLog`; 8 | DROP TABLE `DetailName`; 9 | DROP TABLE `ChannelLog`; 10 | DROP TABLE `ChannelName`; 11 | 12 | 13 | --Turn on Foreign Keys for database 14 | PRAGMA foreign_keys = on; 15 | 16 | --Create Tables for Database 17 | CREATE TABLE
`masterDirectory` ( 18 | `id` INTEGER, 19 | `ldId` TEXT NOT NULL UNIQUE, 20 | `ldxId` TEXT NOT NULL UNIQUE, 21 | `OriginHash` TEXT NOT NULL UNIQUE, 22 | `FinalHash` TEXT NOT NULL UNIQUE, 23 | `Datetime` TEXT, 24 | PRIMARY KEY(`id`) 25 | ); 26 | 27 | CREATE TABLE DetailLog( 28 | id INTEGER PRIMARY KEY, 29 | entryId INTEGER NOT NULL, 30 | fieldId INTEGER NOT NULL, 31 | value BLOB NOT NULL, --NOTE(review): SQLQueries/DetailLog.sql declares this column NUMERIC; confirm which type is intended 32 | unit TEXT, 33 | --Comma added between table constraints (required by SQLite syntax) 34 | FOREIGN KEY(fieldId) REFERENCES DetailName(id), 35 | FOREIGN KEY(entryId) REFERENCES MasterDirectory(id) 36 | ); 37 | 38 | CREATE TABLE DetailName( 39 | id INTEGER PRIMARY KEY, 40 | fieldName TEXT NOT NULL 41 | ); 42 | 43 | CREATE TABLE ChannelLog( 44 | id INTEGER PRIMARY KEY, 45 | entryId INTEGER NOT NULL, 46 | channelId INTEGER NOT NULL, 47 | FOREIGN KEY(entryId) REFERENCES MasterDirectory(id), 48 | FOREIGN KEY(channelId) REFERENCES ChannelName(id) 49 | ); 50 | 51 | CREATE TABLE ChannelName( 52 | id INTEGER PRIMARY KEY, 53 | channelName TEXT NOT NULL 54 | ); 55 | 56 | INSERT INTO `ChannelName`(`id`,`channelName`) VALUES (NULL,'Engine_RPM'); 57 | INSERT INTO `DetailName`(`id`,`fieldName`) VALUES (NULL,'TotalLaps'); 58 | -------------------------------------------------------------------------------- /TestSuite/CheckBuild.m: -------------------------------------------------------------------------------- 1 | %% DatamasterWiki Location 2 | wiki = Datamaster.getConfigSetting('wiki'); 3 | 4 | %Get code to test 5 | exampleCode = getExampleCode(wiki); 6 | 7 | %Run Tests 8 | runtests([pwd; exampleCode(:, 1)]) 9 | 10 | %Clean up 11 | for i = 1:length(exampleCode) 12 | delete(exampleCode{i, 1}) 13 | end 14 | 15 | function exampleCode = getExampleCode(folder) 16 | %Scan Files for matlab code 17 | file = dir(folder); 18 | exampleCode = {}; 19 | for i = 1:length(file) 20 | path = fullfile(file(i).folder, file(i).name); 21 | %Ignore Upwards links 22 | if ~strcmp(file(i).name(1), '.') 23 | if file(i).isdir 24 | %Concatenate vertically: results grow as an Nx1 column (see exampleCode{end+1, 1} below); 25 | %horizontal concat errored whenever two branches returned different counts 26 | exampleCode = [exampleCode; getExampleCode(path)]; 27 | else 28 | 
%Read in file 27 | str = fileread(path); 28 | 29 | %Parse for Code marked for error checking with 'MATLAB' 30 | code = regexp(str, '```MATLAB(.+?)```', 'tokens'); 31 | 32 | for j = 1:length(code) 33 | % Generate Test Code Name (e.g. 'Page_01.m' in tempdir) 34 | [~, name] = fileparts(file(i).name); 35 | name = sprintf('%s_%02d.m', name, j); 36 | exampleCode{end+1, 1} = fullfile(tempdir, name); 37 | 38 | %Create File 39 | fid = fopen(exampleCode{end, 1}, 'w'); 40 | 41 | %Write the extracted example code into the test script 42 | fprintf(fid, '%s', code{j}{:}); 43 | 44 | %Save test script 45 | fclose(fid); 46 | end 47 | end 48 | end 49 | end 50 | end -------------------------------------------------------------------------------- /TestSuite/testDataReporter.m: -------------------------------------------------------------------------------- 1 | %% Constructor 2 | dr = DataReporter; 3 | assert(isa(dr, 'DataReporter')) 4 | 5 | %% Refresh Datastore 6 | dr = DataReporter; 7 | dr.RefreshDatastore; 8 | -------------------------------------------------------------------------------- /TestSuite/testDatamaster.m: -------------------------------------------------------------------------------- 1 | %% Create Datamaster Object 2 | dm = Datamaster; 3 | assert(isa(dm, 'Datamaster')) 4 | 5 | %Get Datastore path 6 | path1 = Datamaster.getConfigSetting('datastore_path'); 7 | path2 = dm.getDatastore; 8 | assert(strcmp(path1, path2)) 9 | 10 | % Check getDatasource 11 | ds = dm.getDatasource; 12 | assert(isa(ds, 'datasource')) 13 | 14 | %Search by Time 15 | ds1 = dm.getDatasource('StartDate', '2016-02-01', 'EndDate', '2016-03-01'); 16 | ds2 = dm.getDatasource('StartDate', '2016-02-01', 'EndDate', '2016-04-01'); 17 | ds3 = dm.getDatasource('StartDate', '2016-01-01', 'EndDate', '2016-03-01'); 18 | ds4 = dm.getDatasource('StartDate', '2016-03-01', 'EndDate', '2016-02-01'); 19 | 20 | 21 | assert(length(ds1) < length(ds2)); %Check EndDate 22 | assert(length(ds1) < length(ds3)); %Check StartDate 23 | assert(length(ds4) == 0); %Invalid Date range 
24 | 25 | %Helper for picking a random datasource index, uniform on 1..length(ds) 26 | randDs = @(ds) randi(length(ds)); 27 | 28 | %Limit Results - Limit to random number of results 29 | num = randDs(ds); 30 | dsNum = dm.getDatasource('limit', num); 31 | assert(length(dsNum) == num); 32 | 33 | %Search by Channel - Single Channel 34 | ds = dm.getDatasource('channel', 'Engine_RPM'); 35 | channels = ds(randDs(ds)).getLogged; 36 | assert(any(strcmp('Engine_RPM', channels))); 37 | 38 | %Search by Channel - Multiple Channels 39 | ds = dm.getDatasource('channel', {'Engine_RPM', 'Engine_Torque'}); 40 | channels = ds(randDs(ds)).getLogged; 41 | assert(any(strcmp('Engine_RPM', channels))); 42 | assert(any(strcmp('Engine_Torque', channels))); -------------------------------------------------------------------------------- /TestSuite/testGetDetail.m: -------------------------------------------------------------------------------- 1 | %% Get Datasource 2 | %Semicolons added for consistency with the other TestSuite scripts (suppress console spam) 3 | dm = Datamaster; 4 | ds = dm.getDatasource('limit', 1); 5 | 6 | %A simple detail comes back as a plain string 7 | detail = ds.getDetail('Driver'); 8 | assert(ischar(detail)); 9 | 10 | ds = dm.getDatasource('limit', 1); 11 | %A detail with a unit comes back as a struct with Unit/Value fields 12 | detail = ds.getDetail('FuelTankCapacity'); 13 | assert(ischar(detail.Unit)) 14 | assert(ischar(detail.Value)) -------------------------------------------------------------------------------- /UnitDefine.txt: -------------------------------------------------------------------------------- 1 | # Definition of additional units for use 2 | 3 | # G Force 4 | G = gravity 5 | 6 | #Lambda Ratio 7 | LA = [] -------------------------------------------------------------------------------- /convertUnit.m: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awadell1/Datamaster/1cd51abd47c3178bb925c7b01014ee36f6b5f821/convertUnit.m -------------------------------------------------------------------------------- /default.ini: -------------------------------------------------------------------------------- 1 | #This File is used to store 
the default configuration settings used by Datamaster 2 | #DO NOT EDIT THIS FILE 3 | #Instead use config.ini to override the settings found here 4 | 5 | #Comment Style: # 6 | 7 | #Note On Formatting: 8 | #Sections are noted with [Section Name] 9 | #Key-Value statements are then given as: key=Value 10 | #When referencing a certain key in the comments use the format: Section:key 11 | 12 | #Replacement Flags 13 | #Some flags of the form %flag% are replaced with another value. Ones of note follow: 14 | #%datastore% -> Replaced with the contents of Datastore:datastore_path 15 | 16 | 17 | [Datastore] 18 | datastore_path=\\en-projectteams.files.cornell.edu\en-projectteams\en-fsae\Data\Datastore 19 | master_directory_path=%datastore%\master_directory.sqlite 20 | 21 | [GoogleDriveLogin] 22 | client_id=MISSING_GET_FROM_GOOGLE 23 | client_secret=MISSING_GET_FROM_GOOGLE -------------------------------------------------------------------------------- /textprogressbar.m: -------------------------------------------------------------------------------- 1 | function textprogressbar(c, varargin) 2 | % This function creates a text progress bar. It should be called with a 3 | % STRING argument to initialize and terminate. Otherwise the number corresponding 4 | % to progress in % should be supplied. 
5 | % INPUTS: C Either: Text string to initialize or terminate 6 | % Percentage number to show progress 7 | % new pass 2nd variable to reset text progressbar 8 | % OUTPUTS: N/A 9 | % Example: Please refer to demo_textprogressbar.m 10 | 11 | % Author: Paul Proteus (e-mail: proteus.paul (at) yahoo (dot) com) 12 | % Version: 1.0 13 | % Changes tracker: 29.06.2010 - First version 14 | 15 | % Inspired by: http://blogs.mathworks.com/loren/2007/08/01/monitoring-progress-of-a-calculation/ 16 | 17 | %% Initialization 18 | persistent strCR; % Carriage return persistent variable 19 | 20 | if nargin == 2 % Any second argument forces a reset of the persistent state 21 | strCR =[]; 22 | end 23 | 24 | % Visualization parameters 25 | strPercentageLength = 10; % Length of percentage string (must be >5) 26 | strDotsMaximum = 10; % The total number of dots in a progress bar 27 | 28 | %% Main 29 | 30 | if isempty(strCR) && ~ischar(c), 31 | % Progress bar must be initialized with a string 32 | error('The text progress must be initialized with a string'); 33 | elseif isempty(strCR) && ischar(c), 34 | % Progress bar - initialization 35 | fprintf('%s',c); 36 | strCR = -1; % Sentinel: initialized, but no progress line printed yet 37 | elseif ~isempty(strCR) && ischar(c), 38 | % Progress bar - termination 39 | strCR = []; 40 | fprintf([c '\n']); 41 | elseif isnumeric(c) 42 | % Progress bar - normal progress 43 | c = floor(c); 44 | percentageOut = [num2str(c) '%%']; % '%%' escapes the percent sign for fprintf 45 | percentageOut = [percentageOut repmat(' ',1,strPercentageLength-length(percentageOut)-1)]; 46 | nDots = floor(c/100*strDotsMaximum); 47 | dotOut = ['[' repmat('#',1,nDots) repmat(' ',1,strDotsMaximum-nDots) ']']; 48 | strOut = [percentageOut dotOut]; 49 | 50 | % Print it on the screen 51 | if strCR == -1, 52 | % Don't do carriage return during first run 53 | fprintf(strOut); 54 | else 55 | % Do it during all the other runs 56 | fprintf([strCR strOut]); 57 | end 58 | 59 | % Update carriage return ('%%' is two chars but prints one, so printed width is length(strOut)-1) 60 | strCR = repmat('\b',1,length(strOut)-1); 61 | 62 | else 63 | % Any other unexpected input 64 | error('Unsupported argument type'); 65 | end 66 
| --------------------------------------------------------------------------------