├── .DS_Store
├── .gitignore
├── .travis.yml
├── LICENSE
├── README.md
├── Session.py
├── diff_match_patch.py
├── messages.json
├── messages
│   ├── 0.0.1.txt
│   └── install.txt
├── out
├── remote-sublime.sublime-project
├── remote.py
├── remote.sublime-commands
├── tests
│   ├── .DS_Store
│   └── test.py
└── travis.sh
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TeamRemote/remote-sublime/515cb427579cd9c02d035b9622906fc7de5446ef/.DS_Store
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 |
5 | # C extensions
6 | *.so
7 |
8 | # Distribution / packaging
9 | .Python
10 | env/
11 | bin/
12 | build/
13 | develop-eggs/
14 | dist/
15 | eggs/
16 | lib/
17 | lib64/
18 | parts/
19 | sdist/
20 | var/
21 | *.egg-info/
22 | .installed.cfg
23 | *.egg
24 |
25 | # Installer logs
26 | pip-log.txt
27 | pip-delete-this-directory.txt
28 |
29 | # Unit test / coverage reports
30 | htmlcov/
31 | .tox/
32 | .coverage
33 | .cache
34 | nosetests.xml
35 | coverage.xml
36 |
37 | # Translations
38 | *.mo
39 |
40 | # Mr Developer
41 | .mr.developer.cfg
42 | .project
43 | .pydevproject
44 |
45 | # Rope
46 | .ropeproject
47 |
48 | # Django stuff:
49 | *.log
50 | *.pot
51 |
52 | # Sphinx documentation
53 | docs/_build/
54 |
55 | # SublimeText workspace
56 | *.sublime-workspace
57 | terminal.glue
58 |
59 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | python:
3 | #- "2.7"
4 | - "3.3"
5 |
6 | env:
7 | global:
8 | - PACKAGE="remote-sublime"
9 | matrix:
10 | #- SUBLIME_TEXT_VERSION="2"
11 | - SUBLIME_TEXT_VERSION="3"
12 |
13 | install:
14 | - sh -e travis.sh $SUBLIME_TEXT_VERSION $PACKAGE
15 |
16 | before_script:
17 | - export DISPLAY=:99.0
18 | - sh -e /etc/init.d/xvfb start
19 |
20 | script:
21 | - python $HOME/.config/sublime-text-$SUBLIME_TEXT_VERSION/Packages/UnitTesting/sbin/run.py $PACKAGE
22 |
23 | notifications:
24 | email: true
25 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2014 Team Remote
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Remote Collab for SublimeText
2 | =============================
3 |
4 | [![Build Status](https://travis-ci.org/TeamRemote/remote-sublime.svg?branch=master)](https://travis-ci.org/TeamRemote/remote-sublime)
5 |
6 | Remote Collab is an open-source SublimeText plugin for remote pair programming, allowing multiple developers to work together on the same project in real-time.
7 |
8 | How to Install
9 | --------------
10 |
11 | #### Via Package Control
12 |
13 | The easiest way to install is using [Sublime Package Control](https://sublime.wbond.net/).
14 |
15 | 1. Open the Command Palette using the menu item `Tools -> Command Palette...` (⇧⌘P on Mac)
16 | 2. Choose `Package Control: Install Package`
17 | 3. Find `RemoteCollab` and hit Enter
18 |
19 | #### Manual
20 |
21 | You can also install Remote Collab manually:
22 |
23 | 1. Download the .zip or .tar.gz archive
24 | 2. Unzip and rename the folder to `RemoteCollab`
25 | 3. Copy the folder into the `Packages` directory, which you can find using the menu item `Sublime Text -> Preferences -> Browse Packages...`
26 |
27 | How to Use
28 | ----------
29 |
30 | #### Host a session
31 |
32 | 1. Open the file you wish to collaboratively edit
33 | 2. Open the Command Palette using the menu item `Tools -> Command Palette...` (⇧⌘P on Mac)
34 | 3. Choose `Remote: Host Session`
35 | 4. You are now hosting a Remote Collab session. Give your IP address to the remote colleague you wish to collaborate with and they can join your session.
36 |
37 | #### Join a session
38 |
39 | 1. Open the Command Palette using the menu item `Tools -> Command Palette...` (⇧⌘P on Mac)
40 | 2. Choose `Remote: Connect to Session`
41 | 3. Enter the IP address of the host whose session you wish to connect to.
42 | 4. You are now collaboratively editing a document with the host!
43 |
44 | Team Remote
45 | -----------
46 |
47 | Team Remote is Hawk Weisman (@hawkw), Dibyojyoti Mukherjee (@dibyom), Soukaina Hamimoune (@hamimounes), and Andreas Bach Landgrebe (@grebes15). We are students at Allegheny College.
48 |
--------------------------------------------------------------------------------
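Note: the `Remote: Host Session` and `Remote: Connect to Session` commands described in the README are implemented in `remote.py` and `remote.sublime-commands`, which are not reproduced in this excerpt. The sketch below is purely hypothetical (the class names and wiring are assumptions, not the plugin's actual code); it only illustrates how such commands could drive the `Session` class defined in `Session.py` that follows.

```python
# Hypothetical sketch only; the plugin's real commands live in remote.py (not shown here).
import sublime_plugin

from .Session import Session  # Session(view, host=None) as defined in Session.py


class HypotheticalHostSessionCommand(sublime_plugin.TextCommand):
    """Rough analogue of 'Remote: Host Session': host the currently open view."""

    def run(self, edit):
        # host=None makes the Session bind a listening socket and wait for a peer.
        Session(self.view)


class HypotheticalConnectToSessionCommand(sublime_plugin.WindowCommand):
    """Rough analogue of 'Remote: Connect to Session': join a host by IP address."""

    def run(self):
        self.window.show_input_panel("Host IP address:", "",
                                     self.on_done, None, None)

    def on_done(self, ip):
        # Passing a host makes the Session connect as a client to that address.
        Session(self.window.active_view(), host=ip)
```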
/Session.py:
--------------------------------------------------------------------------------
1 | from . import diff_match_patch
2 | from collections import deque
3 | import socket
4 | import sublime
5 | import sublime_plugin
6 | import sys
7 | import threading
8 | import time
9 |
10 | ENCODING = "utf_8"
11 |
12 | def debug(message, exception = None):
13 | """
14 | Helper function for printing debug messages with timestamps and thread names.
15 | """
16 | t = time.localtime()
17 | stamp = "[{thread}-{id} {timestamp}] ".format(
18 | thread = threading.current_thread().name,
19 | id = threading.get_ident(),
20 |         timestamp = time.strftime('%H:%M:%S', t)
21 | )
22 | print(stamp, message)
23 | if exception: print (stamp, exception)
24 |
25 |
26 | class Transmitter(threading.Thread):
27 | """
28 | Sends diffs over a socket.
29 | """
30 | def __init__(self, socket, parent):
31 | super(Transmitter, self).__init__()
32 | self.name = "({s}-{id}) Transmitter".format(
33 | s = parent.name,
34 | id = parent.ident,
35 | )
36 | self.socket = socket
37 | self.parent = parent
38 | self.queue = deque([])
39 |
40 | def transmit(self, diff):
41 | """
42 | Function called by other threads to enqueue a diff in this transmitter's
43 | send buffer.
44 | """
45 | self.queue.append(diff)
46 | debug("transmitter enqueued {d}".format(d = diff))
47 |
48 | def run(self):
49 | debug ("started")
50 | while True:
51 | if self.queue:
52 | # Pop off the first item in the queue and encode it
53 | data = self.queue.popleft().encode(ENCODING)
54 | # send the message over the socket
55 | self.socket.send(data)
56 | debug("sent patch over socket {s}".format(s = self.socket))
57 | debug("queue: {q}".format(q = self.queue))
58 |
59 | class Reciever (threading.Thread):
60 | """
61 |     Listens on a socket and patches the view with the received diffs.
62 | """
63 | def __init__(self, socket, parent):
64 | super(Reciever, self).__init__()
65 | self.name = "({s}-{id}) Reciever".format(
66 | s = parent.name,
67 | id = parent.ident,
68 | )
69 | self.socket = socket
70 | self.parent = parent
71 |
72 | def run(self):
73 | debug ("started")
74 | while True:
75 | data = self.socket.recv(4096)
76 |             if data: # If we've received data on our socket...
77 |                 debug ("received data: {d}".format(d = data))
78 | data = data.decode(ENCODING) # decode the patch
79 | self.parent.patch_view(data) # patch the session's view
80 |
81 | class Session(threading.Thread):
82 |
83 | def __init__(self, view, host=None):
84 | """
85 | Constructor for a Session. Host is the IP address of the host we are connecting to.
86 | If we are the host, host should equal 'None'.
87 | """
88 | super(Session, self).__init__()
89 | self.name = "Host" if host is None else "Client"
90 | self.port = 12345 # This should be set from prefs file later
91 | self.view = view
92 | self.shadow = self.get_buffer()
93 | self.dmp = diff_match_patch.diff_match_patch()
94 | self.dmp.Diff_Timeout = 0
95 | self.transmitter = None
96 | self.reciever = None
97 | self.socket = None
98 | self.init_socket = None
99 | self.host = host
100 | self.start()
101 |
102 | def run(self):
103 | try:
104 | sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
105 | if self.host is None: # if the remote host is None, we are the host.
106 | sock.bind(('',self.port))
107 | debug("bound socket, listening for remote")
108 | sock.listen(1)
109 | else:
110 | debug("connecting to {h}:{p}".format(h = self.host, p = self.port))
111 | sock.connect((self.host, self.port))
112 | except OSError as e:
113 | if sock:
114 | sock.close()
115 | debug("Error opening socket: ", e)
116 | else:
117 | if self.host is None: # If we are the host...
118 | conn, address = sock.accept() # accept a connection from the remote
119 | debug ("Accepted connection from {a}".format (a = address))
120 | self.reciever = Reciever(conn, self) # start a reciever for the remote
121 | self.transmitter = Transmitter(conn, self) # start a transmitter for the remote
122 | self.transmitter.start()
123 | self.reciever.start()
124 | self.initial_patch() # send the initial patch to the remote
125 | self.socket = conn # our socket is the socket bound to the remote
126 | self.init_socket = sock # keep a reference to the initial socket for cleanup
127 | else: # Otherwise, we're the remote
128 | self.reciever = Reciever (sock, self) # start a reciever for the host
129 | self.transmitter = Transmitter (sock, self) # and a transmitter
130 | self.reciever.start()
131 | self.transmitter.start()
132 | self.socket = sock
133 |
134 | def initial_patch(self):
135 | """
136 | Sends the initial buffer contents to the remote. This is only called if we are the host.
137 | """
138 | diffs = self.dmp.diff_main('', self.shadow)
139 | patch = self.dmp.patch_make('', diffs)
140 | debug ("Made initial patch.")
141 | self.transmitter.transmit(self.dmp.patch_toText(patch))
142 | debug ("Sent initial patch to remote.")
143 |
144 | def send_diffs(self, new_buffer):
145 | """
146 | Sends diffs to the other peer over the current connection and sets the
147 | current buffer to the local shadow.
148 | """
149 | diffs = self.dmp.diff_main(self.shadow, new_buffer)
150 | debug ("Made diffs: {d}".format(d = diffs))
151 | patch = self.dmp.patch_make(self.shadow, diffs)
152 |         debug ("Made patch: {p}".format(p = patch))
153 | self.transmitter.transmit(self.dmp.patch_toText(patch))
154 | self.shadow = new_buffer
155 |
156 | def patch_view (self, data):
157 | """
158 |         Patches this session's bound text buffer with a patch received from the remote peer.
159 |         FIXME: this doesn't work correctly.
160 | """
161 | patch = self.dmp.patch_fromText(data)
162 | shadow, shadow_results = self.dmp.patch_apply(patch, self.shadow)
163 | if False in shadow_results:
164 | debug ("Patch application failed, buffer may be out of sync")
165 | else:
166 | self.shadow = shadow
167 | try:
168 | self.view.run_command("update_buffer", {"new_buffer": self.shadow})
169 | except Exception as e:
170 |                 debug ("Error occurred while editing buffer ", e)
171 |
172 | def close(self):
173 | """
174 | Closes a session, releasing both the socket and the initial socket
175 | if they are available.
176 | """
177 |         if self.socket is not None:
178 |             debug ("Closing and shutting down socket {s}".format(s = self.socket))
179 |             self.socket.shutdown(socket.SHUT_RDWR)
180 |             self.socket.close()
181 |             debug ("Closed socket.")
182 |         if self.init_socket is not None:
183 |             debug ("Closing handshake socket {s}".format(s = self.init_socket))
184 |             # The listening socket never has a peer connection, so just close it.
185 |             self.init_socket.close()
186 |             debug ("Closed socket.")
187 |
188 | def get_buffer(self):
189 | return self.view.substr(sublime.Region(0, self.view.size()))
190 |
--------------------------------------------------------------------------------
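To make the shadow/patch round trip in `Session.send_diffs()` and `Session.patch_view()` concrete, here is a minimal standalone sketch (not part of the repository) that exercises the same `diff_match_patch` calls outside Sublime. It assumes the bundled `diff_match_patch.py` below is importable; the buffer strings are invented for illustration.

```python
from diff_match_patch import diff_match_patch

dmp = diff_match_patch()
dmp.Diff_Timeout = 0  # Session also disables the diff deadline.

# "Host" side: diff the edited buffer against its shadow and serialise the patch,
# exactly as Session.send_diffs() does before handing the text to the Transmitter.
shadow = "def greet():\n    print('hi')\n"
edited = "def greet(name):\n    print('hi', name)\n"
diffs = dmp.diff_main(shadow, edited)
patch_text = dmp.patch_toText(dmp.patch_make(shadow, diffs))
shadow = edited  # the shadow now reflects what was sent

# "Client" side: apply the received patch text to its own shadow,
# as Session.patch_view() does before running the update_buffer command.
remote_shadow = "def greet():\n    print('hi')\n"
patches = dmp.patch_fromText(patch_text)
patched, results = dmp.patch_apply(patches, remote_shadow)

assert all(results)       # every hunk applied cleanly
assert patched == edited  # both peers now agree on the buffer
```

This is the same shadow discipline the Session class keeps over the socket: the host seeds the client via `initial_patch()` (a patch from the empty string to the full buffer), and every later edit travels as a patch made against the previously transmitted shadow.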
/diff_match_patch.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 |
3 | """Diff Match and Patch
4 |
5 | Copyright 2006 Google Inc.
6 | http://code.google.com/p/google-diff-match-patch/
7 |
8 | Licensed under the Apache License, Version 2.0 (the "License");
9 | you may not use this file except in compliance with the License.
10 | You may obtain a copy of the License at
11 |
12 | http://www.apache.org/licenses/LICENSE-2.0
13 |
14 | Unless required by applicable law or agreed to in writing, software
15 | distributed under the License is distributed on an "AS IS" BASIS,
16 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | See the License for the specific language governing permissions and
18 | limitations under the License.
19 | """
20 |
21 | """Functions for diff, match and patch.
22 |
23 | Computes the difference between two texts to create a patch.
24 | Applies the patch onto another text, allowing for errors.
25 | """
26 |
27 | __author__ = 'fraser@google.com (Neil Fraser)'
28 |
29 | import math
30 | import re
31 | import sys
32 | import time
33 | import urllib.parse
34 |
35 | class diff_match_patch:
36 | """Class containing the diff, match and patch methods.
37 |
38 | Also contains the behaviour settings.
39 | """
40 |
41 | def __init__(self):
42 | """Inits a diff_match_patch object with default settings.
43 | Redefine these in your program to override the defaults.
44 | """
45 |
46 | # Number of seconds to map a diff before giving up (0 for infinity).
47 | self.Diff_Timeout = 1.0
48 | # Cost of an empty edit operation in terms of edit characters.
49 | self.Diff_EditCost = 4
50 | # At what point is no match declared (0.0 = perfection, 1.0 = very loose).
51 | self.Match_Threshold = 0.5
52 | # How far to search for a match (0 = exact location, 1000+ = broad match).
53 | # A match this many characters away from the expected location will add
54 | # 1.0 to the score (0.0 is a perfect match).
55 | self.Match_Distance = 1000
56 | # When deleting a large block of text (over ~64 characters), how close do
57 | # the contents have to be to match the expected contents. (0.0 = perfection,
58 | # 1.0 = very loose). Note that Match_Threshold controls how closely the
59 | # end points of a delete need to match.
60 | self.Patch_DeleteThreshold = 0.5
61 | # Chunk size for context length.
62 | self.Patch_Margin = 4
63 |
64 | # The number of bits in an int.
65 | # Python has no maximum, thus to disable patch splitting set to 0.
66 | # However to avoid long patches in certain pathological cases, use 32.
67 | # Multiple short patches (using native ints) are much faster than long ones.
68 | self.Match_MaxBits = 32
69 |
70 | # DIFF FUNCTIONS
71 |
72 | # The data structure representing a diff is an array of tuples:
73 | # [(DIFF_DELETE, "Hello"), (DIFF_INSERT, "Goodbye"), (DIFF_EQUAL, " world.")]
74 | # which means: delete "Hello", add "Goodbye" and keep " world."
75 | DIFF_DELETE = -1
76 | DIFF_INSERT = 1
77 | DIFF_EQUAL = 0
78 |
79 | def diff_main(self, text1, text2, checklines=True, deadline=None):
80 | """Find the differences between two texts. Simplifies the problem by
81 | stripping any common prefix or suffix off the texts before diffing.
82 |
83 | Args:
84 | text1: Old string to be diffed.
85 | text2: New string to be diffed.
86 | checklines: Optional speedup flag. If present and false, then don't run
87 | a line-level diff first to identify the changed areas.
88 | Defaults to true, which does a faster, slightly less optimal diff.
89 | deadline: Optional time when the diff should be complete by. Used
90 | internally for recursive calls. Users should set DiffTimeout instead.
91 |
92 | Returns:
93 | Array of changes.
94 | """
95 | # Set a deadline by which time the diff must be complete.
96 | if deadline == None:
97 | # Unlike in most languages, Python counts time in seconds.
98 | if self.Diff_Timeout <= 0:
99 | deadline = sys.maxsize
100 | else:
101 | deadline = time.time() + self.Diff_Timeout
102 |
103 | # Check for null inputs.
104 | if text1 == None or text2 == None:
105 | raise ValueError("Null inputs. (diff_main)")
106 |
107 | # Check for equality (speedup).
108 | if text1 == text2:
109 | if text1:
110 | return [(self.DIFF_EQUAL, text1)]
111 | return []
112 |
113 | # Trim off common prefix (speedup).
114 | commonlength = self.diff_commonPrefix(text1, text2)
115 | commonprefix = text1[:commonlength]
116 | text1 = text1[commonlength:]
117 | text2 = text2[commonlength:]
118 |
119 | # Trim off common suffix (speedup).
120 | commonlength = self.diff_commonSuffix(text1, text2)
121 | if commonlength == 0:
122 | commonsuffix = ''
123 | else:
124 | commonsuffix = text1[-commonlength:]
125 | text1 = text1[:-commonlength]
126 | text2 = text2[:-commonlength]
127 |
128 | # Compute the diff on the middle block.
129 | diffs = self.diff_compute(text1, text2, checklines, deadline)
130 |
131 | # Restore the prefix and suffix.
132 | if commonprefix:
133 | diffs[:0] = [(self.DIFF_EQUAL, commonprefix)]
134 | if commonsuffix:
135 | diffs.append((self.DIFF_EQUAL, commonsuffix))
136 | self.diff_cleanupMerge(diffs)
137 | return diffs
138 |
139 | def diff_compute(self, text1, text2, checklines, deadline):
140 | """Find the differences between two texts. Assumes that the texts do not
141 | have any common prefix or suffix.
142 |
143 | Args:
144 | text1: Old string to be diffed.
145 | text2: New string to be diffed.
146 | checklines: Speedup flag. If false, then don't run a line-level diff
147 | first to identify the changed areas.
148 | If true, then run a faster, slightly less optimal diff.
149 | deadline: Time when the diff should be complete by.
150 |
151 | Returns:
152 | Array of changes.
153 | """
154 | if not text1:
155 | # Just add some text (speedup).
156 | return [(self.DIFF_INSERT, text2)]
157 |
158 | if not text2:
159 | # Just delete some text (speedup).
160 | return [(self.DIFF_DELETE, text1)]
161 |
162 | if len(text1) > len(text2):
163 | (longtext, shorttext) = (text1, text2)
164 | else:
165 | (shorttext, longtext) = (text1, text2)
166 | i = longtext.find(shorttext)
167 | if i != -1:
168 | # Shorter text is inside the longer text (speedup).
169 | diffs = [(self.DIFF_INSERT, longtext[:i]), (self.DIFF_EQUAL, shorttext),
170 | (self.DIFF_INSERT, longtext[i + len(shorttext):])]
171 | # Swap insertions for deletions if diff is reversed.
172 | if len(text1) > len(text2):
173 | diffs[0] = (self.DIFF_DELETE, diffs[0][1])
174 | diffs[2] = (self.DIFF_DELETE, diffs[2][1])
175 | return diffs
176 |
177 | if len(shorttext) == 1:
178 | # Single character string.
179 | # After the previous speedup, the character can't be an equality.
180 | return [(self.DIFF_DELETE, text1), (self.DIFF_INSERT, text2)]
181 |
182 | # Check to see if the problem can be split in two.
183 | hm = self.diff_halfMatch(text1, text2)
184 | if hm:
185 | # A half-match was found, sort out the return data.
186 | (text1_a, text1_b, text2_a, text2_b, mid_common) = hm
187 | # Send both pairs off for separate processing.
188 | diffs_a = self.diff_main(text1_a, text2_a, checklines, deadline)
189 | diffs_b = self.diff_main(text1_b, text2_b, checklines, deadline)
190 | # Merge the results.
191 | return diffs_a + [(self.DIFF_EQUAL, mid_common)] + diffs_b
192 |
193 | if checklines and len(text1) > 100 and len(text2) > 100:
194 | return self.diff_lineMode(text1, text2, deadline)
195 |
196 | return self.diff_bisect(text1, text2, deadline)
197 |
198 | def diff_lineMode(self, text1, text2, deadline):
199 | """Do a quick line-level diff on both strings, then rediff the parts for
200 | greater accuracy.
201 | This speedup can produce non-minimal diffs.
202 |
203 | Args:
204 | text1: Old string to be diffed.
205 | text2: New string to be diffed.
206 | deadline: Time when the diff should be complete by.
207 |
208 | Returns:
209 | Array of changes.
210 | """
211 |
212 | # Scan the text on a line-by-line basis first.
213 | (text1, text2, linearray) = self.diff_linesToChars(text1, text2)
214 |
215 | diffs = self.diff_main(text1, text2, False, deadline)
216 |
217 | # Convert the diff back to original text.
218 | self.diff_charsToLines(diffs, linearray)
219 | # Eliminate freak matches (e.g. blank lines)
220 | self.diff_cleanupSemantic(diffs)
221 |
222 | # Rediff any replacement blocks, this time character-by-character.
223 | # Add a dummy entry at the end.
224 | diffs.append((self.DIFF_EQUAL, ''))
225 | pointer = 0
226 | count_delete = 0
227 | count_insert = 0
228 | text_delete = ''
229 | text_insert = ''
230 | while pointer < len(diffs):
231 | if diffs[pointer][0] == self.DIFF_INSERT:
232 | count_insert += 1
233 | text_insert += diffs[pointer][1]
234 | elif diffs[pointer][0] == self.DIFF_DELETE:
235 | count_delete += 1
236 | text_delete += diffs[pointer][1]
237 | elif diffs[pointer][0] == self.DIFF_EQUAL:
238 | # Upon reaching an equality, check for prior redundancies.
239 | if count_delete >= 1 and count_insert >= 1:
240 | # Delete the offending records and add the merged ones.
241 | a = self.diff_main(text_delete, text_insert, False, deadline)
242 | diffs[pointer - count_delete - count_insert : pointer] = a
243 | pointer = pointer - count_delete - count_insert + len(a)
244 | count_insert = 0
245 | count_delete = 0
246 | text_delete = ''
247 | text_insert = ''
248 |
249 | pointer += 1
250 |
251 | diffs.pop() # Remove the dummy entry at the end.
252 |
253 | return diffs
254 |
255 | def diff_bisect(self, text1, text2, deadline):
256 | """Find the 'middle snake' of a diff, split the problem in two
257 | and return the recursively constructed diff.
258 | See Myers 1986 paper: An O(ND) Difference Algorithm and Its Variations.
259 |
260 | Args:
261 | text1: Old string to be diffed.
262 | text2: New string to be diffed.
263 | deadline: Time at which to bail if not yet complete.
264 |
265 | Returns:
266 | Array of diff tuples.
267 | """
268 |
269 | # Cache the text lengths to prevent multiple calls.
270 | text1_length = len(text1)
271 | text2_length = len(text2)
272 | max_d = (text1_length + text2_length + 1) // 2
273 | v_offset = max_d
274 | v_length = 2 * max_d
275 | v1 = [-1] * v_length
276 | v1[v_offset + 1] = 0
277 | v2 = v1[:]
278 | delta = text1_length - text2_length
279 | # If the total number of characters is odd, then the front path will
280 | # collide with the reverse path.
281 | front = (delta % 2 != 0)
282 | # Offsets for start and end of k loop.
283 | # Prevents mapping of space beyond the grid.
284 | k1start = 0
285 | k1end = 0
286 | k2start = 0
287 | k2end = 0
288 | for d in range(max_d):
289 | # Bail out if deadline is reached.
290 | if time.time() > deadline:
291 | break
292 |
293 | # Walk the front path one step.
294 | for k1 in range(-d + k1start, d + 1 - k1end, 2):
295 | k1_offset = v_offset + k1
296 | if k1 == -d or (k1 != d and
297 | v1[k1_offset - 1] < v1[k1_offset + 1]):
298 | x1 = v1[k1_offset + 1]
299 | else:
300 | x1 = v1[k1_offset - 1] + 1
301 | y1 = x1 - k1
302 | while (x1 < text1_length and y1 < text2_length and
303 | text1[x1] == text2[y1]):
304 | x1 += 1
305 | y1 += 1
306 | v1[k1_offset] = x1
307 | if x1 > text1_length:
308 | # Ran off the right of the graph.
309 | k1end += 2
310 | elif y1 > text2_length:
311 | # Ran off the bottom of the graph.
312 | k1start += 2
313 | elif front:
314 | k2_offset = v_offset + delta - k1
315 | if k2_offset >= 0 and k2_offset < v_length and v2[k2_offset] != -1:
316 | # Mirror x2 onto top-left coordinate system.
317 | x2 = text1_length - v2[k2_offset]
318 | if x1 >= x2:
319 | # Overlap detected.
320 | return self.diff_bisectSplit(text1, text2, x1, y1, deadline)
321 |
322 | # Walk the reverse path one step.
323 | for k2 in range(-d + k2start, d + 1 - k2end, 2):
324 | k2_offset = v_offset + k2
325 | if k2 == -d or (k2 != d and
326 | v2[k2_offset - 1] < v2[k2_offset + 1]):
327 | x2 = v2[k2_offset + 1]
328 | else:
329 | x2 = v2[k2_offset - 1] + 1
330 | y2 = x2 - k2
331 | while (x2 < text1_length and y2 < text2_length and
332 | text1[-x2 - 1] == text2[-y2 - 1]):
333 | x2 += 1
334 | y2 += 1
335 | v2[k2_offset] = x2
336 | if x2 > text1_length:
337 | # Ran off the left of the graph.
338 | k2end += 2
339 | elif y2 > text2_length:
340 | # Ran off the top of the graph.
341 | k2start += 2
342 | elif not front:
343 | k1_offset = v_offset + delta - k2
344 | if k1_offset >= 0 and k1_offset < v_length and v1[k1_offset] != -1:
345 | x1 = v1[k1_offset]
346 | y1 = v_offset + x1 - k1_offset
347 | # Mirror x2 onto top-left coordinate system.
348 | x2 = text1_length - x2
349 | if x1 >= x2:
350 | # Overlap detected.
351 | return self.diff_bisectSplit(text1, text2, x1, y1, deadline)
352 |
353 | # Diff took too long and hit the deadline or
354 | # number of diffs equals number of characters, no commonality at all.
355 | return [(self.DIFF_DELETE, text1), (self.DIFF_INSERT, text2)]
356 |
357 | def diff_bisectSplit(self, text1, text2, x, y, deadline):
358 | """Given the location of the 'middle snake', split the diff in two parts
359 | and recurse.
360 |
361 | Args:
362 | text1: Old string to be diffed.
363 | text2: New string to be diffed.
364 | x: Index of split point in text1.
365 | y: Index of split point in text2.
366 | deadline: Time at which to bail if not yet complete.
367 |
368 | Returns:
369 | Array of diff tuples.
370 | """
371 | text1a = text1[:x]
372 | text2a = text2[:y]
373 | text1b = text1[x:]
374 | text2b = text2[y:]
375 |
376 | # Compute both diffs serially.
377 | diffs = self.diff_main(text1a, text2a, False, deadline)
378 | diffsb = self.diff_main(text1b, text2b, False, deadline)
379 |
380 | return diffs + diffsb
381 |
382 | def diff_linesToChars(self, text1, text2):
383 | """Split two texts into an array of strings. Reduce the texts to a string
384 | of hashes where each Unicode character represents one line.
385 |
386 | Args:
387 | text1: First string.
388 | text2: Second string.
389 |
390 | Returns:
391 | Three element tuple, containing the encoded text1, the encoded text2 and
392 | the array of unique strings. The zeroth element of the array of unique
393 | strings is intentionally blank.
394 | """
395 | lineArray = [] # e.g. lineArray[4] == "Hello\n"
396 | lineHash = {} # e.g. lineHash["Hello\n"] == 4
397 |
398 | # "\x00" is a valid character, but various debuggers don't like it.
399 | # So we'll insert a junk entry to avoid generating a null character.
400 | lineArray.append('')
401 |
402 | def diff_linesToCharsMunge(text):
403 | """Split a text into an array of strings. Reduce the texts to a string
404 | of hashes where each Unicode character represents one line.
405 | Modifies linearray and linehash through being a closure.
406 |
407 | Args:
408 | text: String to encode.
409 |
410 | Returns:
411 | Encoded string.
412 | """
413 | chars = []
414 | # Walk the text, pulling out a substring for each line.
415 |       # text.split('\n') would temporarily double our memory footprint.
416 | # Modifying text would create many large strings to garbage collect.
417 | lineStart = 0
418 | lineEnd = -1
419 | while lineEnd < len(text) - 1:
420 | lineEnd = text.find('\n', lineStart)
421 | if lineEnd == -1:
422 | lineEnd = len(text) - 1
423 | line = text[lineStart:lineEnd + 1]
424 | lineStart = lineEnd + 1
425 |
426 | if line in lineHash:
427 | chars.append(chr(lineHash[line]))
428 | else:
429 | lineArray.append(line)
430 | lineHash[line] = len(lineArray) - 1
431 | chars.append(chr(len(lineArray) - 1))
432 | return "".join(chars)
433 |
434 | chars1 = diff_linesToCharsMunge(text1)
435 | chars2 = diff_linesToCharsMunge(text2)
436 | return (chars1, chars2, lineArray)
437 |
438 | def diff_charsToLines(self, diffs, lineArray):
439 | """Rehydrate the text in a diff from a string of line hashes to real lines
440 | of text.
441 |
442 | Args:
443 | diffs: Array of diff tuples.
444 | lineArray: Array of unique strings.
445 | """
446 | for x in range(len(diffs)):
447 | text = []
448 | for char in diffs[x][1]:
449 | text.append(lineArray[ord(char)])
450 | diffs[x] = (diffs[x][0], "".join(text))
451 |
452 | def diff_commonPrefix(self, text1, text2):
453 | """Determine the common prefix of two strings.
454 |
455 | Args:
456 | text1: First string.
457 | text2: Second string.
458 |
459 | Returns:
460 | The number of characters common to the start of each string.
461 | """
462 | # Quick check for common null cases.
463 | if not text1 or not text2 or text1[0] != text2[0]:
464 | return 0
465 | # Binary search.
466 | # Performance analysis: http://neil.fraser.name/news/2007/10/09/
467 | pointermin = 0
468 | pointermax = min(len(text1), len(text2))
469 | pointermid = pointermax
470 | pointerstart = 0
471 | while pointermin < pointermid:
472 | if text1[pointerstart:pointermid] == text2[pointerstart:pointermid]:
473 | pointermin = pointermid
474 | pointerstart = pointermin
475 | else:
476 | pointermax = pointermid
477 | pointermid = (pointermax - pointermin) // 2 + pointermin
478 | return pointermid
479 |
480 | def diff_commonSuffix(self, text1, text2):
481 | """Determine the common suffix of two strings.
482 |
483 | Args:
484 | text1: First string.
485 | text2: Second string.
486 |
487 | Returns:
488 | The number of characters common to the end of each string.
489 | """
490 | # Quick check for common null cases.
491 | if not text1 or not text2 or text1[-1] != text2[-1]:
492 | return 0
493 | # Binary search.
494 | # Performance analysis: http://neil.fraser.name/news/2007/10/09/
495 | pointermin = 0
496 | pointermax = min(len(text1), len(text2))
497 | pointermid = pointermax
498 | pointerend = 0
499 | while pointermin < pointermid:
500 | if (text1[-pointermid:len(text1) - pointerend] ==
501 | text2[-pointermid:len(text2) - pointerend]):
502 | pointermin = pointermid
503 | pointerend = pointermin
504 | else:
505 | pointermax = pointermid
506 | pointermid = (pointermax - pointermin) // 2 + pointermin
507 | return pointermid
508 |
509 | def diff_commonOverlap(self, text1, text2):
510 | """Determine if the suffix of one string is the prefix of another.
511 |
512 | Args:
513 | text1 First string.
514 | text2 Second string.
515 |
516 | Returns:
517 | The number of characters common to the end of the first
518 | string and the start of the second string.
519 | """
520 | # Cache the text lengths to prevent multiple calls.
521 | text1_length = len(text1)
522 | text2_length = len(text2)
523 | # Eliminate the null case.
524 | if text1_length == 0 or text2_length == 0:
525 | return 0
526 | # Truncate the longer string.
527 | if text1_length > text2_length:
528 | text1 = text1[-text2_length:]
529 | elif text1_length < text2_length:
530 | text2 = text2[:text1_length]
531 | text_length = min(text1_length, text2_length)
532 | # Quick check for the worst case.
533 | if text1 == text2:
534 | return text_length
535 |
536 | # Start by looking for a single character match
537 | # and increase length until no match is found.
538 | # Performance analysis: http://neil.fraser.name/news/2010/11/04/
539 | best = 0
540 | length = 1
541 | while True:
542 | pattern = text1[-length:]
543 | found = text2.find(pattern)
544 | if found == -1:
545 | return best
546 | length += found
547 | if found == 0 or text1[-length:] == text2[:length]:
548 | best = length
549 | length += 1
550 |
551 | def diff_halfMatch(self, text1, text2):
552 | """Do the two texts share a substring which is at least half the length of
553 | the longer text?
554 | This speedup can produce non-minimal diffs.
555 |
556 | Args:
557 | text1: First string.
558 | text2: Second string.
559 |
560 | Returns:
561 | Five element Array, containing the prefix of text1, the suffix of text1,
562 | the prefix of text2, the suffix of text2 and the common middle. Or None
563 | if there was no match.
564 | """
565 | if self.Diff_Timeout <= 0:
566 | # Don't risk returning a non-optimal diff if we have unlimited time.
567 | return None
568 | if len(text1) > len(text2):
569 | (longtext, shorttext) = (text1, text2)
570 | else:
571 | (shorttext, longtext) = (text1, text2)
572 | if len(longtext) < 4 or len(shorttext) * 2 < len(longtext):
573 | return None # Pointless.
574 |
575 | def diff_halfMatchI(longtext, shorttext, i):
576 | """Does a substring of shorttext exist within longtext such that the
577 | substring is at least half the length of longtext?
578 | Closure, but does not reference any external variables.
579 |
580 | Args:
581 | longtext: Longer string.
582 | shorttext: Shorter string.
583 | i: Start index of quarter length substring within longtext.
584 |
585 | Returns:
586 | Five element Array, containing the prefix of longtext, the suffix of
587 | longtext, the prefix of shorttext, the suffix of shorttext and the
588 | common middle. Or None if there was no match.
589 | """
590 | seed = longtext[i:i + len(longtext) // 4]
591 | best_common = ''
592 | j = shorttext.find(seed)
593 | while j != -1:
594 | prefixLength = self.diff_commonPrefix(longtext[i:], shorttext[j:])
595 | suffixLength = self.diff_commonSuffix(longtext[:i], shorttext[:j])
596 | if len(best_common) < suffixLength + prefixLength:
597 | best_common = (shorttext[j - suffixLength:j] +
598 | shorttext[j:j + prefixLength])
599 | best_longtext_a = longtext[:i - suffixLength]
600 | best_longtext_b = longtext[i + prefixLength:]
601 | best_shorttext_a = shorttext[:j - suffixLength]
602 | best_shorttext_b = shorttext[j + prefixLength:]
603 | j = shorttext.find(seed, j + 1)
604 |
605 | if len(best_common) * 2 >= len(longtext):
606 | return (best_longtext_a, best_longtext_b,
607 | best_shorttext_a, best_shorttext_b, best_common)
608 | else:
609 | return None
610 |
611 | # First check if the second quarter is the seed for a half-match.
612 | hm1 = diff_halfMatchI(longtext, shorttext, (len(longtext) + 3) // 4)
613 | # Check again based on the third quarter.
614 | hm2 = diff_halfMatchI(longtext, shorttext, (len(longtext) + 1) // 2)
615 | if not hm1 and not hm2:
616 | return None
617 | elif not hm2:
618 | hm = hm1
619 | elif not hm1:
620 | hm = hm2
621 | else:
622 | # Both matched. Select the longest.
623 | if len(hm1[4]) > len(hm2[4]):
624 | hm = hm1
625 | else:
626 | hm = hm2
627 |
628 | # A half-match was found, sort out the return data.
629 | if len(text1) > len(text2):
630 | (text1_a, text1_b, text2_a, text2_b, mid_common) = hm
631 | else:
632 | (text2_a, text2_b, text1_a, text1_b, mid_common) = hm
633 | return (text1_a, text1_b, text2_a, text2_b, mid_common)
634 |
635 | def diff_cleanupSemantic(self, diffs):
636 | """Reduce the number of edits by eliminating semantically trivial
637 | equalities.
638 |
639 | Args:
640 | diffs: Array of diff tuples.
641 | """
642 | changes = False
643 | equalities = [] # Stack of indices where equalities are found.
644 | lastequality = None # Always equal to diffs[equalities[-1]][1]
645 | pointer = 0 # Index of current position.
646 | # Number of chars that changed prior to the equality.
647 | length_insertions1, length_deletions1 = 0, 0
648 | # Number of chars that changed after the equality.
649 | length_insertions2, length_deletions2 = 0, 0
650 | while pointer < len(diffs):
651 | if diffs[pointer][0] == self.DIFF_EQUAL: # Equality found.
652 | equalities.append(pointer)
653 | length_insertions1, length_insertions2 = length_insertions2, 0
654 | length_deletions1, length_deletions2 = length_deletions2, 0
655 | lastequality = diffs[pointer][1]
656 | else: # An insertion or deletion.
657 | if diffs[pointer][0] == self.DIFF_INSERT:
658 | length_insertions2 += len(diffs[pointer][1])
659 | else:
660 | length_deletions2 += len(diffs[pointer][1])
661 | # Eliminate an equality that is smaller or equal to the edits on both
662 | # sides of it.
663 | if (lastequality and (len(lastequality) <=
664 | max(length_insertions1, length_deletions1)) and
665 | (len(lastequality) <= max(length_insertions2, length_deletions2))):
666 | # Duplicate record.
667 | diffs.insert(equalities[-1], (self.DIFF_DELETE, lastequality))
668 | # Change second copy to insert.
669 | diffs[equalities[-1] + 1] = (self.DIFF_INSERT,
670 | diffs[equalities[-1] + 1][1])
671 | # Throw away the equality we just deleted.
672 | equalities.pop()
673 | # Throw away the previous equality (it needs to be reevaluated).
674 | if len(equalities):
675 | equalities.pop()
676 | if len(equalities):
677 | pointer = equalities[-1]
678 | else:
679 | pointer = -1
680 | # Reset the counters.
681 | length_insertions1, length_deletions1 = 0, 0
682 | length_insertions2, length_deletions2 = 0, 0
683 | lastequality = None
684 | changes = True
685 | pointer += 1
686 |
687 | # Normalize the diff.
688 | if changes:
689 | self.diff_cleanupMerge(diffs)
690 | self.diff_cleanupSemanticLossless(diffs)
691 |
692 | # Find any overlaps between deletions and insertions.
693 |     # e.g: <del>abcxxx</del><ins>xxxdef</ins>
694 |     #   -> <del>abc</del>xxx<ins>def</ins>
695 |     # e.g: <del>xxxabc</del><ins>defxxx</ins>
696 |     #   -> <ins>def</ins>xxx<del>abc</del>
697 | # Only extract an overlap if it is as big as the edit ahead or behind it.
698 | pointer = 1
699 | while pointer < len(diffs):
700 | if (diffs[pointer - 1][0] == self.DIFF_DELETE and
701 | diffs[pointer][0] == self.DIFF_INSERT):
702 | deletion = diffs[pointer - 1][1]
703 | insertion = diffs[pointer][1]
704 | overlap_length1 = self.diff_commonOverlap(deletion, insertion)
705 | overlap_length2 = self.diff_commonOverlap(insertion, deletion)
706 | if overlap_length1 >= overlap_length2:
707 | if (overlap_length1 >= len(deletion) / 2.0 or
708 | overlap_length1 >= len(insertion) / 2.0):
709 | # Overlap found. Insert an equality and trim the surrounding edits.
710 | diffs.insert(pointer, (self.DIFF_EQUAL,
711 | insertion[:overlap_length1]))
712 | diffs[pointer - 1] = (self.DIFF_DELETE,
713 | deletion[:len(deletion) - overlap_length1])
714 | diffs[pointer + 1] = (self.DIFF_INSERT,
715 | insertion[overlap_length1:])
716 | pointer += 1
717 | else:
718 | if (overlap_length2 >= len(deletion) / 2.0 or
719 | overlap_length2 >= len(insertion) / 2.0):
720 | # Reverse overlap found.
721 | # Insert an equality and swap and trim the surrounding edits.
722 | diffs.insert(pointer, (self.DIFF_EQUAL, deletion[:overlap_length2]))
723 | diffs[pointer - 1] = (self.DIFF_INSERT,
724 | insertion[:len(insertion) - overlap_length2])
725 | diffs[pointer + 1] = (self.DIFF_DELETE, deletion[overlap_length2:])
726 | pointer += 1
727 | pointer += 1
728 | pointer += 1
729 |
730 | def diff_cleanupSemanticLossless(self, diffs):
731 | """Look for single edits surrounded on both sides by equalities
732 | which can be shifted sideways to align the edit to a word boundary.
733 |     e.g: The c<ins>at c</ins>ame. -> The <ins>cat </ins>came.
734 |
735 | Args:
736 | diffs: Array of diff tuples.
737 | """
738 |
739 | def diff_cleanupSemanticScore(one, two):
740 | """Given two strings, compute a score representing whether the
741 | internal boundary falls on logical boundaries.
742 | Scores range from 6 (best) to 0 (worst).
743 | Closure, but does not reference any external variables.
744 |
745 | Args:
746 | one: First string.
747 | two: Second string.
748 |
749 | Returns:
750 | The score.
751 | """
752 | if not one or not two:
753 | # Edges are the best.
754 | return 6
755 |
756 | # Each port of this function behaves slightly differently due to
757 | # subtle differences in each language's definition of things like
758 | # 'whitespace'. Since this function's purpose is largely cosmetic,
759 | # the choice has been made to use each language's native features
760 | # rather than force total conformity.
761 | char1 = one[-1]
762 | char2 = two[0]
763 | nonAlphaNumeric1 = not char1.isalnum()
764 | nonAlphaNumeric2 = not char2.isalnum()
765 | whitespace1 = nonAlphaNumeric1 and char1.isspace()
766 | whitespace2 = nonAlphaNumeric2 and char2.isspace()
767 | lineBreak1 = whitespace1 and (char1 == "\r" or char1 == "\n")
768 | lineBreak2 = whitespace2 and (char2 == "\r" or char2 == "\n")
769 | blankLine1 = lineBreak1 and self.BLANKLINEEND.search(one)
770 | blankLine2 = lineBreak2 and self.BLANKLINESTART.match(two)
771 |
772 | if blankLine1 or blankLine2:
773 | # Five points for blank lines.
774 | return 5
775 | elif lineBreak1 or lineBreak2:
776 | # Four points for line breaks.
777 | return 4
778 | elif nonAlphaNumeric1 and not whitespace1 and whitespace2:
779 | # Three points for end of sentences.
780 | return 3
781 | elif whitespace1 or whitespace2:
782 | # Two points for whitespace.
783 | return 2
784 | elif nonAlphaNumeric1 or nonAlphaNumeric2:
785 | # One point for non-alphanumeric.
786 | return 1
787 | return 0
788 |
789 | pointer = 1
790 | # Intentionally ignore the first and last element (don't need checking).
791 | while pointer < len(diffs) - 1:
792 | if (diffs[pointer - 1][0] == self.DIFF_EQUAL and
793 | diffs[pointer + 1][0] == self.DIFF_EQUAL):
794 | # This is a single edit surrounded by equalities.
795 | equality1 = diffs[pointer - 1][1]
796 | edit = diffs[pointer][1]
797 | equality2 = diffs[pointer + 1][1]
798 |
799 | # First, shift the edit as far left as possible.
800 | commonOffset = self.diff_commonSuffix(equality1, edit)
801 | if commonOffset:
802 | commonString = edit[-commonOffset:]
803 | equality1 = equality1[:-commonOffset]
804 | edit = commonString + edit[:-commonOffset]
805 | equality2 = commonString + equality2
806 |
807 | # Second, step character by character right, looking for the best fit.
808 | bestEquality1 = equality1
809 | bestEdit = edit
810 | bestEquality2 = equality2
811 | bestScore = (diff_cleanupSemanticScore(equality1, edit) +
812 | diff_cleanupSemanticScore(edit, equality2))
813 | while edit and equality2 and edit[0] == equality2[0]:
814 | equality1 += edit[0]
815 | edit = edit[1:] + equality2[0]
816 | equality2 = equality2[1:]
817 | score = (diff_cleanupSemanticScore(equality1, edit) +
818 | diff_cleanupSemanticScore(edit, equality2))
819 | # The >= encourages trailing rather than leading whitespace on edits.
820 | if score >= bestScore:
821 | bestScore = score
822 | bestEquality1 = equality1
823 | bestEdit = edit
824 | bestEquality2 = equality2
825 |
826 | if diffs[pointer - 1][1] != bestEquality1:
827 | # We have an improvement, save it back to the diff.
828 | if bestEquality1:
829 | diffs[pointer - 1] = (diffs[pointer - 1][0], bestEquality1)
830 | else:
831 | del diffs[pointer - 1]
832 | pointer -= 1
833 | diffs[pointer] = (diffs[pointer][0], bestEdit)
834 | if bestEquality2:
835 | diffs[pointer + 1] = (diffs[pointer + 1][0], bestEquality2)
836 | else:
837 | del diffs[pointer + 1]
838 | pointer -= 1
839 | pointer += 1
840 |
841 | # Define some regex patterns for matching boundaries.
842 | BLANKLINEEND = re.compile(r"\n\r?\n$");
843 | BLANKLINESTART = re.compile(r"^\r?\n\r?\n");
844 |
845 | def diff_cleanupEfficiency(self, diffs):
846 | """Reduce the number of edits by eliminating operationally trivial
847 | equalities.
848 |
849 | Args:
850 | diffs: Array of diff tuples.
851 | """
852 | changes = False
853 | equalities = [] # Stack of indices where equalities are found.
854 | lastequality = None # Always equal to diffs[equalities[-1]][1]
855 | pointer = 0 # Index of current position.
856 | pre_ins = False # Is there an insertion operation before the last equality.
857 | pre_del = False # Is there a deletion operation before the last equality.
858 | post_ins = False # Is there an insertion operation after the last equality.
859 | post_del = False # Is there a deletion operation after the last equality.
860 | while pointer < len(diffs):
861 | if diffs[pointer][0] == self.DIFF_EQUAL: # Equality found.
862 | if (len(diffs[pointer][1]) < self.Diff_EditCost and
863 | (post_ins or post_del)):
864 | # Candidate found.
865 | equalities.append(pointer)
866 | pre_ins = post_ins
867 | pre_del = post_del
868 | lastequality = diffs[pointer][1]
869 | else:
870 | # Not a candidate, and can never become one.
871 | equalities = []
872 | lastequality = None
873 |
874 | post_ins = post_del = False
875 | else: # An insertion or deletion.
876 | if diffs[pointer][0] == self.DIFF_DELETE:
877 | post_del = True
878 | else:
879 | post_ins = True
880 |
881 |         # Five types to be split:
882 |         # <ins>A</ins><del>B</del>XY<ins>C</ins><del>D</del>
883 |         # <ins>A</ins>X<ins>C</ins><del>D</del>
884 |         # <ins>A</ins><del>B</del>X<ins>C</ins>
885 |         # <del>A</del>X<ins>C</ins><del>D</del>
886 |         # <ins>A</ins><del>B</del>X<del>C</del>
887 |
888 | if lastequality and ((pre_ins and pre_del and post_ins and post_del) or
889 | ((len(lastequality) < self.Diff_EditCost / 2) and
890 | (pre_ins + pre_del + post_ins + post_del) == 3)):
891 | # Duplicate record.
892 | diffs.insert(equalities[-1], (self.DIFF_DELETE, lastequality))
893 | # Change second copy to insert.
894 | diffs[equalities[-1] + 1] = (self.DIFF_INSERT,
895 | diffs[equalities[-1] + 1][1])
896 | equalities.pop() # Throw away the equality we just deleted.
897 | lastequality = None
898 | if pre_ins and pre_del:
899 | # No changes made which could affect previous entry, keep going.
900 | post_ins = post_del = True
901 | equalities = []
902 | else:
903 | if len(equalities):
904 | equalities.pop() # Throw away the previous equality.
905 | if len(equalities):
906 | pointer = equalities[-1]
907 | else:
908 | pointer = -1
909 | post_ins = post_del = False
910 | changes = True
911 | pointer += 1
912 |
913 | if changes:
914 | self.diff_cleanupMerge(diffs)
915 |
916 | def diff_cleanupMerge(self, diffs):
917 | """Reorder and merge like edit sections. Merge equalities.
918 | Any edit section can move as long as it doesn't cross an equality.
919 |
920 | Args:
921 | diffs: Array of diff tuples.
922 | """
923 | diffs.append((self.DIFF_EQUAL, '')) # Add a dummy entry at the end.
924 | pointer = 0
925 | count_delete = 0
926 | count_insert = 0
927 | text_delete = ''
928 | text_insert = ''
929 | while pointer < len(diffs):
930 | if diffs[pointer][0] == self.DIFF_INSERT:
931 | count_insert += 1
932 | text_insert += diffs[pointer][1]
933 | pointer += 1
934 | elif diffs[pointer][0] == self.DIFF_DELETE:
935 | count_delete += 1
936 | text_delete += diffs[pointer][1]
937 | pointer += 1
938 | elif diffs[pointer][0] == self.DIFF_EQUAL:
939 | # Upon reaching an equality, check for prior redundancies.
940 | if count_delete + count_insert > 1:
941 | if count_delete != 0 and count_insert != 0:
942 |             # Factor out any common prefixes.
943 | commonlength = self.diff_commonPrefix(text_insert, text_delete)
944 | if commonlength != 0:
945 | x = pointer - count_delete - count_insert - 1
946 | if x >= 0 and diffs[x][0] == self.DIFF_EQUAL:
947 | diffs[x] = (diffs[x][0], diffs[x][1] +
948 | text_insert[:commonlength])
949 | else:
950 | diffs.insert(0, (self.DIFF_EQUAL, text_insert[:commonlength]))
951 | pointer += 1
952 | text_insert = text_insert[commonlength:]
953 | text_delete = text_delete[commonlength:]
954 |             # Factor out any common suffixes.
955 | commonlength = self.diff_commonSuffix(text_insert, text_delete)
956 | if commonlength != 0:
957 | diffs[pointer] = (diffs[pointer][0], text_insert[-commonlength:] +
958 | diffs[pointer][1])
959 | text_insert = text_insert[:-commonlength]
960 | text_delete = text_delete[:-commonlength]
961 | # Delete the offending records and add the merged ones.
962 | if count_delete == 0:
963 | diffs[pointer - count_insert : pointer] = [
964 | (self.DIFF_INSERT, text_insert)]
965 | elif count_insert == 0:
966 | diffs[pointer - count_delete : pointer] = [
967 | (self.DIFF_DELETE, text_delete)]
968 | else:
969 | diffs[pointer - count_delete - count_insert : pointer] = [
970 | (self.DIFF_DELETE, text_delete),
971 | (self.DIFF_INSERT, text_insert)]
972 | pointer = pointer - count_delete - count_insert + 1
973 | if count_delete != 0:
974 | pointer += 1
975 | if count_insert != 0:
976 | pointer += 1
977 | elif pointer != 0 and diffs[pointer - 1][0] == self.DIFF_EQUAL:
978 | # Merge this equality with the previous one.
979 | diffs[pointer - 1] = (diffs[pointer - 1][0],
980 | diffs[pointer - 1][1] + diffs[pointer][1])
981 | del diffs[pointer]
982 | else:
983 | pointer += 1
984 |
985 | count_insert = 0
986 | count_delete = 0
987 | text_delete = ''
988 | text_insert = ''
989 |
990 | if diffs[-1][1] == '':
991 | diffs.pop() # Remove the dummy entry at the end.
992 |
993 | # Second pass: look for single edits surrounded on both sides by equalities
994 | # which can be shifted sideways to eliminate an equality.
995 |     # e.g: A<ins>BA</ins>C -> <ins>AB</ins>AC
996 | changes = False
997 | pointer = 1
998 | # Intentionally ignore the first and last element (don't need checking).
999 | while pointer < len(diffs) - 1:
1000 | if (diffs[pointer - 1][0] == self.DIFF_EQUAL and
1001 | diffs[pointer + 1][0] == self.DIFF_EQUAL):
1002 | # This is a single edit surrounded by equalities.
1003 | if diffs[pointer][1].endswith(diffs[pointer - 1][1]):
1004 | # Shift the edit over the previous equality.
1005 | diffs[pointer] = (diffs[pointer][0],
1006 | diffs[pointer - 1][1] +
1007 | diffs[pointer][1][:-len(diffs[pointer - 1][1])])
1008 | diffs[pointer + 1] = (diffs[pointer + 1][0],
1009 | diffs[pointer - 1][1] + diffs[pointer + 1][1])
1010 | del diffs[pointer - 1]
1011 | changes = True
1012 | elif diffs[pointer][1].startswith(diffs[pointer + 1][1]):
1013 | # Shift the edit over the next equality.
1014 | diffs[pointer - 1] = (diffs[pointer - 1][0],
1015 | diffs[pointer - 1][1] + diffs[pointer + 1][1])
1016 | diffs[pointer] = (diffs[pointer][0],
1017 | diffs[pointer][1][len(diffs[pointer + 1][1]):] +
1018 | diffs[pointer + 1][1])
1019 | del diffs[pointer + 1]
1020 | changes = True
1021 | pointer += 1
1022 |
1023 | # If shifts were made, the diff needs reordering and another shift sweep.
1024 | if changes:
1025 | self.diff_cleanupMerge(diffs)
1026 |
1027 | def diff_xIndex(self, diffs, loc):
1028 | """loc is a location in text1, compute and return the equivalent location
1029 | in text2. e.g. "The cat" vs "The big cat", 1->1, 5->8
1030 |
1031 | Args:
1032 | diffs: Array of diff tuples.
1033 | loc: Location within text1.
1034 |
1035 | Returns:
1036 | Location within text2.
1037 | """
1038 | chars1 = 0
1039 | chars2 = 0
1040 | last_chars1 = 0
1041 | last_chars2 = 0
1042 | for x in range(len(diffs)):
1043 | (op, text) = diffs[x]
1044 | if op != self.DIFF_INSERT: # Equality or deletion.
1045 | chars1 += len(text)
1046 | if op != self.DIFF_DELETE: # Equality or insertion.
1047 | chars2 += len(text)
1048 | if chars1 > loc: # Overshot the location.
1049 | break
1050 | last_chars1 = chars1
1051 | last_chars2 = chars2
1052 |
1053 | if len(diffs) != x and diffs[x][0] == self.DIFF_DELETE:
1054 | # The location was deleted.
1055 | return last_chars2
1056 | # Add the remaining len(character).
1057 | return last_chars2 + (loc - last_chars1)
1058 |
1059 | def diff_prettyHtml(self, diffs):
1060 | """Convert a diff array into a pretty HTML report.
1061 |
1062 | Args:
1063 | diffs: Array of diff tuples.
1064 |
1065 | Returns:
1066 | HTML representation.
1067 | """
1068 | html = []
1069 | for (op, data) in diffs:
1070 |       text = (data.replace("&", "&amp;").replace("<", "&lt;")
1071 |                  .replace(">", "&gt;").replace("\n", "&para;<br>"))
1072 |       if op == self.DIFF_INSERT:
1073 |         html.append("<ins style=\"background:#e6ffe6;\">%s</ins>" % text)
1074 |       elif op == self.DIFF_DELETE:
1075 |         html.append("<del style=\"background:#ffe6e6;\">%s</del>" % text)
1076 |       elif op == self.DIFF_EQUAL:
1077 |         html.append("<span>%s</span>" % text)
1078 | return "".join(html)
1079 |
1080 | def diff_text1(self, diffs):
1081 | """Compute and return the source text (all equalities and deletions).
1082 |
1083 | Args:
1084 | diffs: Array of diff tuples.
1085 |
1086 | Returns:
1087 | Source text.
1088 | """
1089 | text = []
1090 | for (op, data) in diffs:
1091 | if op != self.DIFF_INSERT:
1092 | text.append(data)
1093 | return "".join(text)
1094 |
1095 | def diff_text2(self, diffs):
1096 | """Compute and return the destination text (all equalities and insertions).
1097 |
1098 | Args:
1099 | diffs: Array of diff tuples.
1100 |
1101 | Returns:
1102 | Destination text.
1103 | """
1104 | text = []
1105 | for (op, data) in diffs:
1106 | if op != self.DIFF_DELETE:
1107 | text.append(data)
1108 | return "".join(text)
1109 |
1110 | def diff_levenshtein(self, diffs):
1111 | """Compute the Levenshtein distance; the number of inserted, deleted or
1112 | substituted characters.
1113 |
1114 | Args:
1115 | diffs: Array of diff tuples.
1116 |
1117 | Returns:
1118 | Number of changes.
1119 | """
1120 | levenshtein = 0
1121 | insertions = 0
1122 | deletions = 0
1123 | for (op, data) in diffs:
1124 | if op == self.DIFF_INSERT:
1125 | insertions += len(data)
1126 | elif op == self.DIFF_DELETE:
1127 | deletions += len(data)
1128 | elif op == self.DIFF_EQUAL:
1129 | # A deletion and an insertion is one substitution.
1130 | levenshtein += max(insertions, deletions)
1131 | insertions = 0
1132 | deletions = 0
1133 | levenshtein += max(insertions, deletions)
1134 | return levenshtein
1135 |
1136 | def diff_toDelta(self, diffs):
1137 | """Crush the diff into an encoded string which describes the operations
1138 | required to transform text1 into text2.
1139 | E.g. =3\t-2\t+ing -> Keep 3 chars, delete 2 chars, insert 'ing'.
1140 | Operations are tab-separated. Inserted text is escaped using %xx notation.
1141 |
1142 | Args:
1143 | diffs: Array of diff tuples.
1144 |
1145 | Returns:
1146 | Delta text.
1147 | """
1148 | text = []
1149 | for (op, data) in diffs:
1150 | if op == self.DIFF_INSERT:
1151 | # High ascii will raise UnicodeDecodeError. Use Unicode instead.
1152 | data = data.encode("utf-8")
1153 | text.append("+" + urllib.parse.quote(data, "!~*'();/?:@&=+$,# "))
1154 | elif op == self.DIFF_DELETE:
1155 | text.append("-%d" % len(data))
1156 | elif op == self.DIFF_EQUAL:
1157 | text.append("=%d" % len(data))
1158 | return "\t".join(text)
1159 |
1160 | def diff_fromDelta(self, text1, delta):
1161 | """Given the original text1, and an encoded string which describes the
1162 | operations required to transform text1 into text2, compute the full diff.
1163 |
1164 | Args:
1165 | text1: Source string for the diff.
1166 | delta: Delta text.
1167 |
1168 | Returns:
1169 | Array of diff tuples.
1170 |
1171 | Raises:
1172 | ValueError: If invalid input.
1173 | """
1174 | diffs = []
1175 | pointer = 0 # Cursor in text1
1176 | tokens = delta.split("\t")
1177 | for token in tokens:
1178 | if token == "":
1179 | # Blank tokens are ok (from a trailing \t).
1180 | continue
1181 | # Each token begins with a one character parameter which specifies the
1182 | # operation of this token (delete, insert, equality).
1183 | param = token[1:]
1184 | if token[0] == "+":
1185 | param = urllib.parse.unquote(param)
1186 | diffs.append((self.DIFF_INSERT, param))
1187 | elif token[0] == "-" or token[0] == "=":
1188 | try:
1189 | n = int(param)
1190 | except ValueError:
1191 | raise ValueError("Invalid number in diff_fromDelta: " + param)
1192 | if n < 0:
1193 | raise ValueError("Negative number in diff_fromDelta: " + param)
1194 | text = text1[pointer : pointer + n]
1195 | pointer += n
1196 | if token[0] == "=":
1197 | diffs.append((self.DIFF_EQUAL, text))
1198 | else:
1199 | diffs.append((self.DIFF_DELETE, text))
1200 | else:
1201 | # Anything else is an error.
1202 | raise ValueError("Invalid diff operation in diff_fromDelta: " +
1203 | token[0])
1204 | if pointer != len(text1):
1205 | raise ValueError(
1206 | "Delta length (%d) does not equal source text length (%d)." %
1207 | (pointer, len(text1)))
1208 | return diffs
1209 |
1210 | # MATCH FUNCTIONS
1211 |
1212 | def match_main(self, text, pattern, loc):
1213 | """Locate the best instance of 'pattern' in 'text' near 'loc'.
1214 |
1215 | Args:
1216 | text: The text to search.
1217 | pattern: The pattern to search for.
1218 | loc: The location to search around.
1219 |
1220 | Returns:
1221 | Best match index or -1.
1222 | """
1223 | # Check for null inputs.
1224 | if text == None or pattern == None:
1225 | raise ValueError("Null inputs. (match_main)")
1226 |
1227 | loc = max(0, min(loc, len(text)))
1228 | if text == pattern:
1229 | # Shortcut (potentially not guaranteed by the algorithm)
1230 | return 0
1231 | elif not text:
1232 | # Nothing to match.
1233 | return -1
1234 | elif text[loc:loc + len(pattern)] == pattern:
1235 | # Perfect match at the perfect spot! (Includes case of null pattern)
1236 | return loc
1237 | else:
1238 | # Do a fuzzy compare.
1239 | match = self.match_bitap(text, pattern, loc)
1240 | return match
1241 |
1242 | def match_bitap(self, text, pattern, loc):
1243 | """Locate the best instance of 'pattern' in 'text' near 'loc' using the
1244 | Bitap algorithm.
1245 |
1246 | Args:
1247 | text: The text to search.
1248 | pattern: The pattern to search for.
1249 | loc: The location to search around.
1250 |
1251 | Returns:
1252 | Best match index or -1.
1253 | """
1254 | # Python doesn't have a maxint limit, so ignore this check.
1255 | #if self.Match_MaxBits != 0 and len(pattern) > self.Match_MaxBits:
1256 | # raise ValueError("Pattern too long for this application.")
1257 |
1258 | # Initialise the alphabet.
1259 | s = self.match_alphabet(pattern)
1260 |
1261 | def match_bitapScore(e, x):
1262 | """Compute and return the score for a match with e errors and x location.
1263 | Accesses loc and pattern through being a closure.
1264 |
1265 | Args:
1266 | e: Number of errors in match.
1267 | x: Location of match.
1268 |
1269 | Returns:
1270 | Overall score for match (0.0 = good, 1.0 = bad).
1271 | """
1272 | accuracy = float(e) / len(pattern)
1273 | proximity = abs(loc - x)
1274 | if not self.Match_Distance:
1275 | # Dodge divide by zero error.
1276 | return proximity and 1.0 or accuracy
1277 | return accuracy + (proximity / float(self.Match_Distance))
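    # Worked example with the default Match_Distance of 1000: a match with
    # 2 errors in a 32-character pattern, found 8 characters from 'loc',
    # scores 2/32 + 8/1000 = 0.0705 (lower scores are better).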
1278 |
1279 | # Highest score beyond which we give up.
1280 | score_threshold = self.Match_Threshold
1281 | # Is there a nearby exact match? (speedup)
1282 | best_loc = text.find(pattern, loc)
1283 | if best_loc != -1:
1284 | score_threshold = min(match_bitapScore(0, best_loc), score_threshold)
1285 | # What about in the other direction? (speedup)
1286 | best_loc = text.rfind(pattern, loc + len(pattern))
1287 | if best_loc != -1:
1288 | score_threshold = min(match_bitapScore(0, best_loc), score_threshold)
1289 |
1290 | # Initialise the bit arrays.
1291 | matchmask = 1 << (len(pattern) - 1)
1292 | best_loc = -1
1293 |
1294 | bin_max = len(pattern) + len(text)
1295 | # Empty initialization added to appease pychecker.
1296 | last_rd = None
1297 | for d in range(len(pattern)):
1298 | # Scan for the best match each iteration allows for one more error.
1299 | # Run a binary search to determine how far from 'loc' we can stray at
1300 | # this error level.
1301 | bin_min = 0
1302 | bin_mid = bin_max
1303 | while bin_min < bin_mid:
1304 | if match_bitapScore(d, loc + bin_mid) <= score_threshold:
1305 | bin_min = bin_mid
1306 | else:
1307 | bin_max = bin_mid
1308 | bin_mid = (bin_max - bin_min) // 2 + bin_min
1309 |
1310 | # Use the result from this iteration as the maximum for the next.
1311 | bin_max = bin_mid
1312 | start = max(1, loc - bin_mid + 1)
1313 | finish = min(loc + bin_mid, len(text)) + len(pattern)
1314 |
1315 | rd = [0] * (finish + 2)
1316 | rd[finish + 1] = (1 << d) - 1
1317 | for j in range(finish, start - 1, -1):
1318 | if len(text) <= j - 1:
1319 | # Out of range.
1320 | charMatch = 0
1321 | else:
1322 | charMatch = s.get(text[j - 1], 0)
1323 | if d == 0: # First pass: exact match.
1324 | rd[j] = ((rd[j + 1] << 1) | 1) & charMatch
1325 | else: # Subsequent passes: fuzzy match.
1326 | rd[j] = (((rd[j + 1] << 1) | 1) & charMatch) | (
1327 | ((last_rd[j + 1] | last_rd[j]) << 1) | 1) | last_rd[j + 1]
1328 | if rd[j] & matchmask:
1329 | score = match_bitapScore(d, j - 1)
1330 | # This match will almost certainly be better than any existing match.
1331 | # But check anyway.
1332 | if score <= score_threshold:
1333 | # Told you so.
1334 | score_threshold = score
1335 | best_loc = j - 1
1336 | if best_loc > loc:
1337 | # When passing loc, don't exceed our current distance from loc.
1338 | start = max(1, 2 * loc - best_loc)
1339 | else:
1340 | # Already passed loc, downhill from here on in.
1341 | break
1342 | # No hope for a (better) match at greater error levels.
1343 | if match_bitapScore(d + 1, loc) > score_threshold:
1344 | break
1345 | last_rd = rd
1346 | return best_loc
1347 |
1348 | def match_alphabet(self, pattern):
1349 | """Initialise the alphabet for the Bitap algorithm.
1350 |
1351 | Args:
1352 | pattern: The text to encode.
1353 |
1354 | Returns:
1355 | Hash of character locations.
1356 | """
1357 | s = {}
1358 | for char in pattern:
1359 | s[char] = 0
1360 | for i in range(len(pattern)):
1361 | s[pattern[i]] |= 1 << (len(pattern) - i - 1)
1362 | return s
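  # Example: match_alphabet("abc") returns {'a': 4, 'b': 2, 'c': 1}; the
  # highest bit corresponds to the first character, and repeated characters
  # OR their bits together, so match_alphabet("aba") returns {'a': 5, 'b': 2}.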
1363 |
1364 | # PATCH FUNCTIONS
1365 |
1366 | def patch_addContext(self, patch, text):
1367 | """Increase the context until it is unique,
1368 | but don't let the pattern expand beyond Match_MaxBits.
1369 |
1370 | Args:
1371 | patch: The patch to grow.
1372 | text: Source text.
1373 | """
1374 | if len(text) == 0:
1375 | return
1376 | pattern = text[patch.start2 : patch.start2 + patch.length1]
1377 | padding = 0
1378 |
1379 | # Look for the first and last matches of pattern in text. If two different
1380 | # matches are found, increase the pattern length.
1381 | while (text.find(pattern) != text.rfind(pattern) and (self.Match_MaxBits ==
1382 | 0 or len(pattern) < self.Match_MaxBits - self.Patch_Margin -
1383 | self.Patch_Margin)):
1384 | padding += self.Patch_Margin
1385 | pattern = text[max(0, patch.start2 - padding) :
1386 | patch.start2 + patch.length1 + padding]
1387 | # Add one chunk for good luck.
1388 | padding += self.Patch_Margin
1389 |
1390 | # Add the prefix.
1391 | prefix = text[max(0, patch.start2 - padding) : patch.start2]
1392 | if prefix:
1393 | patch.diffs[:0] = [(self.DIFF_EQUAL, prefix)]
1394 | # Add the suffix.
1395 | suffix = text[patch.start2 + patch.length1 :
1396 | patch.start2 + patch.length1 + padding]
1397 | if suffix:
1398 | patch.diffs.append((self.DIFF_EQUAL, suffix))
1399 |
1400 | # Roll back the start points.
1401 | patch.start1 -= len(prefix)
1402 | patch.start2 -= len(prefix)
1403 | # Extend lengths.
1404 | patch.length1 += len(prefix) + len(suffix)
1405 | patch.length2 += len(prefix) + len(suffix)
1406 |
1407 | def patch_make(self, a, b=None, c=None):
1408 | """Compute a list of patches to turn text1 into text2.
1409 | Use diffs if provided, otherwise compute it ourselves.
1410 | There are four ways to call this function, depending on what data is
1411 | available to the caller:
1412 | Method 1:
1413 | a = text1, b = text2
1414 | Method 2:
1415 | a = diffs
1416 | Method 3 (optimal):
1417 | a = text1, b = diffs
1418 | Method 4 (deprecated, use method 3):
1419 | a = text1, b = text2, c = diffs
1420 |
1421 | Args:
1422 | a: text1 (methods 1,3,4) or Array of diff tuples for text1 to
1423 | text2 (method 2).
1424 | b: text2 (methods 1,4) or Array of diff tuples for text1 to
1425 | text2 (method 3) or undefined (method 2).
1426 | c: Array of diff tuples for text1 to text2 (method 4) or
1427 | undefined (methods 1,2,3).
1428 |
1429 | Returns:
1430 | Array of Patch objects.
1431 | """
1432 | text1 = None
1433 | diffs = None
1434 | if isinstance(a, str) and isinstance(b, str) and c is None:
1435 | # Method 1: text1, text2
1436 | # Compute diffs from text1 and text2.
1437 | text1 = a
1438 | diffs = self.diff_main(text1, b, True)
1439 | if len(diffs) > 2:
1440 | self.diff_cleanupSemantic(diffs)
1441 | self.diff_cleanupEfficiency(diffs)
1442 | elif isinstance(a, list) and b is None and c is None:
1443 | # Method 2: diffs
1444 | # Compute text1 from diffs.
1445 | diffs = a
1446 | text1 = self.diff_text1(diffs)
1447 | elif isinstance(a, str) and isinstance(b, list) and c is None:
1448 | # Method 3: text1, diffs
1449 | text1 = a
1450 | diffs = b
1451 | elif (isinstance(a, str) and isinstance(b, str) and
1452 | isinstance(c, list)):
1453 | # Method 4: text1, text2, diffs
1454 | # text2 is not used.
1455 | text1 = a
1456 | diffs = c
1457 | else:
1458 | raise ValueError("Unknown call format to patch_make.")
1459 |
1460 | if not diffs:
1461 | return [] # Get rid of the None case.
1462 | patches = []
1463 | patch = patch_obj()
1464 | char_count1 = 0 # Number of characters into the text1 string.
1465 | char_count2 = 0 # Number of characters into the text2 string.
1466 | prepatch_text = text1 # Recreate the patches to determine context info.
1467 | postpatch_text = text1
1468 | for x in range(len(diffs)):
1469 | (diff_type, diff_text) = diffs[x]
1470 | if len(patch.diffs) == 0 and diff_type != self.DIFF_EQUAL:
1471 | # A new patch starts here.
1472 | patch.start1 = char_count1
1473 | patch.start2 = char_count2
1474 | if diff_type == self.DIFF_INSERT:
1475 | # Insertion
1476 | patch.diffs.append(diffs[x])
1477 | patch.length2 += len(diff_text)
1478 | postpatch_text = (postpatch_text[:char_count2] + diff_text +
1479 | postpatch_text[char_count2:])
1480 | elif diff_type == self.DIFF_DELETE:
1481 | # Deletion.
1482 | patch.length1 += len(diff_text)
1483 | patch.diffs.append(diffs[x])
1484 | postpatch_text = (postpatch_text[:char_count2] +
1485 | postpatch_text[char_count2 + len(diff_text):])
1486 | elif (diff_type == self.DIFF_EQUAL and
1487 | len(diff_text) <= 2 * self.Patch_Margin and
1488 | len(patch.diffs) != 0 and len(diffs) != x + 1):
1489 | # Small equality inside a patch.
1490 | patch.diffs.append(diffs[x])
1491 | patch.length1 += len(diff_text)
1492 | patch.length2 += len(diff_text)
1493 |
1494 | if (diff_type == self.DIFF_EQUAL and
1495 | len(diff_text) >= 2 * self.Patch_Margin):
1496 | # Time for a new patch.
1497 | if len(patch.diffs) != 0:
1498 | self.patch_addContext(patch, prepatch_text)
1499 | patches.append(patch)
1500 | patch = patch_obj()
1501 | # Unlike Unidiff, our patch lists have a rolling context.
1502 | # http://code.google.com/p/google-diff-match-patch/wiki/Unidiff
1503 | # Update prepatch text & pos to reflect the application of the
1504 | # just completed patch.
1505 | prepatch_text = postpatch_text
1506 | char_count1 = char_count2
1507 |
1508 | # Update the current character count.
1509 | if diff_type != self.DIFF_INSERT:
1510 | char_count1 += len(diff_text)
1511 | if diff_type != self.DIFF_DELETE:
1512 | char_count2 += len(diff_text)
1513 |
1514 | # Pick up the leftover patch if not empty.
1515 | if len(patch.diffs) != 0:
1516 | self.patch_addContext(patch, prepatch_text)
1517 | patches.append(patch)
1518 | return patches
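  # Usage sketch for the common case (method 1), with dmp = diff_match_patch():
  #   patches = dmp.patch_make("The quick brown fox", "The quick red fox")
  # The diff is computed and cleaned up internally, and the result is a list
  # of patch_obj instances ready for patch_toText or patch_apply.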
1519 |
1520 | def patch_deepCopy(self, patches):
1521 | """Given an array of patches, return another array that is identical.
1522 |
1523 | Args:
1524 | patches: Array of Patch objects.
1525 |
1526 | Returns:
1527 | Array of Patch objects.
1528 | """
1529 | patchesCopy = []
1530 | for patch in patches:
1531 | patchCopy = patch_obj()
1532 | # No need to deep copy the tuples since they are immutable.
1533 | patchCopy.diffs = patch.diffs[:]
1534 | patchCopy.start1 = patch.start1
1535 | patchCopy.start2 = patch.start2
1536 | patchCopy.length1 = patch.length1
1537 | patchCopy.length2 = patch.length2
1538 | patchesCopy.append(patchCopy)
1539 | return patchesCopy
1540 |
1541 | def patch_apply(self, patches, text):
1542 | """Merge a set of patches onto the text. Return a patched text, as well
1543 | as a list of true/false values indicating which patches were applied.
1544 |
1545 | Args:
1546 | patches: Array of Patch objects.
1547 | text: Old text.
1548 |
1549 | Returns:
1550 | Two element Array, containing the new text and an array of boolean values.
1551 | """
1552 | if not patches:
1553 | return (text, [])
1554 |
1555 | # Deep copy the patches so that no changes are made to originals.
1556 | patches = self.patch_deepCopy(patches)
1557 |
1558 | nullPadding = self.patch_addPadding(patches)
1559 | text = nullPadding + text + nullPadding
1560 | self.patch_splitMax(patches)
1561 |
1562 | # delta keeps track of the offset between the expected and actual location
1563 | # of the previous patch. If there are patches expected at positions 10 and
1564 | # 20, but the first patch was found at 12, delta is 2 and the second patch
1565 | # has an effective expected position of 22.
1566 | delta = 0
1567 | results = []
1568 | for patch in patches:
1569 | expected_loc = patch.start2 + delta
1570 | text1 = self.diff_text1(patch.diffs)
1571 | end_loc = -1
1572 | if len(text1) > self.Match_MaxBits:
1573 | # patch_splitMax will only provide an oversized pattern in the case of
1574 | # a monster delete.
1575 | start_loc = self.match_main(text, text1[:self.Match_MaxBits],
1576 | expected_loc)
1577 | if start_loc != -1:
1578 | end_loc = self.match_main(text, text1[-self.Match_MaxBits:],
1579 | expected_loc + len(text1) - self.Match_MaxBits)
1580 | if end_loc == -1 or start_loc >= end_loc:
1581 | # Can't find valid trailing context. Drop this patch.
1582 | start_loc = -1
1583 | else:
1584 | start_loc = self.match_main(text, text1, expected_loc)
1585 | if start_loc == -1:
1586 | # No match found. :(
1587 | results.append(False)
1588 | # Subtract the delta for this failed patch from subsequent patches.
1589 | delta -= patch.length2 - patch.length1
1590 | else:
1591 | # Found a match. :)
1592 | results.append(True)
1593 | delta = start_loc - expected_loc
1594 | if end_loc == -1:
1595 | text2 = text[start_loc : start_loc + len(text1)]
1596 | else:
1597 | text2 = text[start_loc : end_loc + self.Match_MaxBits]
1598 | if text1 == text2:
1599 | # Perfect match, just shove the replacement text in.
1600 | text = (text[:start_loc] + self.diff_text2(patch.diffs) +
1601 | text[start_loc + len(text1):])
1602 | else:
1603 | # Imperfect match.
1604 | # Run a diff to get a framework of equivalent indices.
1605 | diffs = self.diff_main(text1, text2, False)
1606 | if (len(text1) > self.Match_MaxBits and
1607 | self.diff_levenshtein(diffs) / float(len(text1)) >
1608 | self.Patch_DeleteThreshold):
1609 | # The end points match, but the content is unacceptably bad.
1610 | results[-1] = False
1611 | else:
1612 | self.diff_cleanupSemanticLossless(diffs)
1613 | index1 = 0
1614 | for (op, data) in patch.diffs:
1615 | if op != self.DIFF_EQUAL:
1616 | index2 = self.diff_xIndex(diffs, index1)
1617 | if op == self.DIFF_INSERT: # Insertion
1618 | text = text[:start_loc + index2] + data + text[start_loc +
1619 | index2:]
1620 | elif op == self.DIFF_DELETE: # Deletion
1621 | text = text[:start_loc + index2] + text[start_loc +
1622 | self.diff_xIndex(diffs, index1 + len(data)):]
1623 | if op != self.DIFF_DELETE:
1624 | index1 += len(data)
1625 | # Strip the padding off.
1626 | text = text[len(nullPadding):-len(nullPadding)]
1627 | return (text, results)
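  # Usage sketch, continuing the patch_make example above:
  #   new_text, applied = dmp.patch_apply(patches, "The quick brown fox!")
  # 'new_text' is the patched string and 'applied' holds one boolean per
  # patch, so callers can report which hunks failed to match.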
1628 |
1629 | def patch_addPadding(self, patches):
1630 | """Add some padding on text start and end so that edges can match
1631 | something. Intended to be called only from within patch_apply.
1632 |
1633 | Args:
1634 | patches: Array of Patch objects.
1635 |
1636 | Returns:
1637 | The padding string added to each side.
1638 | """
1639 | paddingLength = self.Patch_Margin
1640 | nullPadding = ""
1641 | for x in range(1, paddingLength + 1):
1642 | nullPadding += chr(x)
1643 |
1644 | # Bump all the patches forward.
1645 | for patch in patches:
1646 | patch.start1 += paddingLength
1647 | patch.start2 += paddingLength
1648 |
1649 | # Add some padding on start of first diff.
1650 | patch = patches[0]
1651 | diffs = patch.diffs
1652 | if not diffs or diffs[0][0] != self.DIFF_EQUAL:
1653 | # Add nullPadding equality.
1654 | diffs.insert(0, (self.DIFF_EQUAL, nullPadding))
1655 | patch.start1 -= paddingLength # Should be 0.
1656 | patch.start2 -= paddingLength # Should be 0.
1657 | patch.length1 += paddingLength
1658 | patch.length2 += paddingLength
1659 | elif paddingLength > len(diffs[0][1]):
1660 | # Grow first equality.
1661 | extraLength = paddingLength - len(diffs[0][1])
1662 | newText = nullPadding[len(diffs[0][1]):] + diffs[0][1]
1663 | diffs[0] = (diffs[0][0], newText)
1664 | patch.start1 -= extraLength
1665 | patch.start2 -= extraLength
1666 | patch.length1 += extraLength
1667 | patch.length2 += extraLength
1668 |
1669 | # Add some padding on end of last diff.
1670 | patch = patches[-1]
1671 | diffs = patch.diffs
1672 | if not diffs or diffs[-1][0] != self.DIFF_EQUAL:
1673 | # Add nullPadding equality.
1674 | diffs.append((self.DIFF_EQUAL, nullPadding))
1675 | patch.length1 += paddingLength
1676 | patch.length2 += paddingLength
1677 | elif paddingLength > len(diffs[-1][1]):
1678 | # Grow last equality.
1679 | extraLength = paddingLength - len(diffs[-1][1])
1680 | newText = diffs[-1][1] + nullPadding[:extraLength]
1681 | diffs[-1] = (diffs[-1][0], newText)
1682 | patch.length1 += extraLength
1683 | patch.length2 += extraLength
1684 |
1685 | return nullPadding
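  # Example: with the default Patch_Margin of 4 the padding is the string
  # "\x01\x02\x03\x04"; patch_apply wraps the text in it so that patches
  # touching the very start or end of the document still have context to match.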
1686 |
1687 | def patch_splitMax(self, patches):
1688 | """Look through the patches and break up any which are longer than the
1689 | maximum limit of the match algorithm.
1690 | Intended to be called only from within patch_apply.
1691 |
1692 | Args:
1693 | patches: Array of Patch objects.
1694 | """
1695 | patch_size = self.Match_MaxBits
1696 | if patch_size == 0:
1697 | # Python has the option of not splitting strings due to its ability
1698 | # to handle integers of arbitrary precision.
1699 | return
1700 | for x in range(len(patches)):
1701 | if patches[x].length1 <= patch_size:
1702 | continue
1703 | bigpatch = patches[x]
1704 | # Remove the big old patch.
1705 | del patches[x]
1706 | x -= 1
1707 | start1 = bigpatch.start1
1708 | start2 = bigpatch.start2
1709 | precontext = ''
1710 | while len(bigpatch.diffs) != 0:
1711 | # Create one of several smaller patches.
1712 | patch = patch_obj()
1713 | empty = True
1714 | patch.start1 = start1 - len(precontext)
1715 | patch.start2 = start2 - len(precontext)
1716 | if precontext:
1717 | patch.length1 = patch.length2 = len(precontext)
1718 | patch.diffs.append((self.DIFF_EQUAL, precontext))
1719 |
1720 | while (len(bigpatch.diffs) != 0 and
1721 | patch.length1 < patch_size - self.Patch_Margin):
1722 | (diff_type, diff_text) = bigpatch.diffs[0]
1723 | if diff_type == self.DIFF_INSERT:
1724 | # Insertions are harmless.
1725 | patch.length2 += len(diff_text)
1726 | start2 += len(diff_text)
1727 | patch.diffs.append(bigpatch.diffs.pop(0))
1728 | empty = False
1729 | elif (diff_type == self.DIFF_DELETE and len(patch.diffs) == 1 and
1730 | patch.diffs[0][0] == self.DIFF_EQUAL and
1731 | len(diff_text) > 2 * patch_size):
1732 | # This is a large deletion. Let it pass in one chunk.
1733 | patch.length1 += len(diff_text)
1734 | start1 += len(diff_text)
1735 | empty = False
1736 | patch.diffs.append((diff_type, diff_text))
1737 | del bigpatch.diffs[0]
1738 | else:
1739 | # Deletion or equality. Only take as much as we can stomach.
1740 | diff_text = diff_text[:patch_size - patch.length1 -
1741 | self.Patch_Margin]
1742 | patch.length1 += len(diff_text)
1743 | start1 += len(diff_text)
1744 | if diff_type == self.DIFF_EQUAL:
1745 | patch.length2 += len(diff_text)
1746 | start2 += len(diff_text)
1747 | else:
1748 | empty = False
1749 |
1750 | patch.diffs.append((diff_type, diff_text))
1751 | if diff_text == bigpatch.diffs[0][1]:
1752 | del bigpatch.diffs[0]
1753 | else:
1754 | bigpatch.diffs[0] = (bigpatch.diffs[0][0],
1755 | bigpatch.diffs[0][1][len(diff_text):])
1756 |
1757 | # Compute the head context for the next patch.
1758 | precontext = self.diff_text2(patch.diffs)
1759 | precontext = precontext[-self.Patch_Margin:]
1760 | # Append the end context for this patch.
1761 | postcontext = self.diff_text1(bigpatch.diffs)[:self.Patch_Margin]
1762 | if postcontext:
1763 | patch.length1 += len(postcontext)
1764 | patch.length2 += len(postcontext)
1765 | if len(patch.diffs) != 0 and patch.diffs[-1][0] == self.DIFF_EQUAL:
1766 | patch.diffs[-1] = (self.DIFF_EQUAL, patch.diffs[-1][1] +
1767 | postcontext)
1768 | else:
1769 | patch.diffs.append((self.DIFF_EQUAL, postcontext))
1770 |
1771 | if not empty:
1772 | x += 1
1773 | patches.insert(x, patch)
1774 |
1775 | def patch_toText(self, patches):
1776 | """Take a list of patches and return a textual representation.
1777 |
1778 | Args:
1779 | patches: Array of Patch objects.
1780 |
1781 | Returns:
1782 | Text representation of patches.
1783 | """
1784 | text = []
1785 | for patch in patches:
1786 | text.append(str(patch))
1787 | return "".join(text)
1788 |
1789 | def patch_fromText(self, textline):
1790 | """Parse a textual representation of patches and return a list of patch
1791 | objects.
1792 |
1793 | Args:
1794 | textline: Text representation of patches.
1795 |
1796 | Returns:
1797 | Array of Patch objects.
1798 |
1799 | Raises:
1800 | ValueError: If invalid input.
1801 | """
1802 | patches = []
1803 | if not textline:
1804 | return patches
1805 | text = textline.split('\n')
1806 | while len(text) != 0:
1807 |       m = re.match(r"^@@ -(\d+),?(\d*) \+(\d+),?(\d*) @@$", text[0])
1808 | if not m:
1809 | raise ValueError("Invalid patch string: " + text[0])
1810 | patch = patch_obj()
1811 | patches.append(patch)
1812 | patch.start1 = int(m.group(1))
1813 | if m.group(2) == '':
1814 | patch.start1 -= 1
1815 | patch.length1 = 1
1816 | elif m.group(2) == '0':
1817 | patch.length1 = 0
1818 | else:
1819 | patch.start1 -= 1
1820 | patch.length1 = int(m.group(2))
1821 |
1822 | patch.start2 = int(m.group(3))
1823 | if m.group(4) == '':
1824 | patch.start2 -= 1
1825 | patch.length2 = 1
1826 | elif m.group(4) == '0':
1827 | patch.length2 = 0
1828 | else:
1829 | patch.start2 -= 1
1830 | patch.length2 = int(m.group(4))
1831 |
1832 | del text[0]
1833 |
1834 | while len(text) != 0:
1835 | if text[0]:
1836 | sign = text[0][0]
1837 | else:
1838 | sign = ''
1839 | line = urllib.parse.unquote(text[0][1:])
1840 | if sign == '+':
1841 | # Insertion.
1842 | patch.diffs.append((self.DIFF_INSERT, line))
1843 | elif sign == '-':
1844 | # Deletion.
1845 | patch.diffs.append((self.DIFF_DELETE, line))
1846 | elif sign == ' ':
1847 | # Minor equality.
1848 | patch.diffs.append((self.DIFF_EQUAL, line))
1849 | elif sign == '@':
1850 | # Start of next patch.
1851 | break
1852 | elif sign == '':
1853 | # Blank line? Whatever.
1854 | pass
1855 | else:
1856 | # WTF?
1857 | raise ValueError("Invalid patch mode: '%s'\n%s" % (sign, line))
1858 | del text[0]
1859 | return patches
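  # Format sketch: a patch whose diffs are [(DIFF_EQUAL, "a"), (DIFF_DELETE, "b"),
  # (DIFF_INSERT, "c"), (DIFF_EQUAL, "d")] with start1 == start2 == 0 serialises as
  #   @@ -1,3 +1,3 @@
  #    a
  #   -b
  #   +c
  #    d
  # (bodies are %-encoded), and patch_fromText parses such text back into the
  # equivalent list of patch_obj instances.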
1860 |
1861 |
1862 | class patch_obj:
1863 | """Class representing one patch operation.
1864 | """
1865 |
1866 | def __init__(self):
1867 | """Initializes with an empty list of diffs.
1868 | """
1869 | self.diffs = []
1870 | self.start1 = None
1871 | self.start2 = None
1872 | self.length1 = 0
1873 | self.length2 = 0
1874 |
1875 | def __str__(self):
1876 |     """Emulate GNU diff's format.
1877 |     Header: @@ -382,8 +481,9 @@
1878 |     Indices are printed as 1-based, not 0-based.
1879 |
1880 | Returns:
1881 | The GNU diff string.
1882 | """
1883 | if self.length1 == 0:
1884 | coords1 = str(self.start1) + ",0"
1885 | elif self.length1 == 1:
1886 | coords1 = str(self.start1 + 1)
1887 | else:
1888 | coords1 = str(self.start1 + 1) + "," + str(self.length1)
1889 | if self.length2 == 0:
1890 | coords2 = str(self.start2) + ",0"
1891 | elif self.length2 == 1:
1892 | coords2 = str(self.start2 + 1)
1893 | else:
1894 | coords2 = str(self.start2 + 1) + "," + str(self.length2)
1895 | text = ["@@ -", coords1, " +", coords2, " @@\n"]
1896 | # Escape the body of the patch with %xx notation.
1897 | for (op, data) in self.diffs:
1898 | if op == diff_match_patch.DIFF_INSERT:
1899 | text.append("+")
1900 | elif op == diff_match_patch.DIFF_DELETE:
1901 | text.append("-")
1902 | elif op == diff_match_patch.DIFF_EQUAL:
1903 | text.append(" ")
1904 | # High ascii will raise UnicodeDecodeError. Use Unicode instead.
1905 | data = data.encode("utf-8")
1906 | text.append(urllib.parse.quote(data, "!~*'();/?:@&=+$,# ") + "\n")
1907 | return "".join(text)
1908 |
--------------------------------------------------------------------------------
/messages.json:
--------------------------------------------------------------------------------
1 | {
2 | "install": "messages/install.txt",
3 |     "0.0.1": "messages/0.0.1.txt"
4 | }
--------------------------------------------------------------------------------
/messages/0.0.1.txt:
--------------------------------------------------------------------------------
1 | Version 0.0.1
2 | -------------
3 |
4 | + Initial release version
5 | + Currently supports a single session at a time
6 | + Currently supports up to two users per session
--------------------------------------------------------------------------------
/messages/install.txt:
--------------------------------------------------------------------------------
1 | Remote Collab for SublimeText
2 | =============================
3 |
4 | Remote Collab is an open-source SublimeText plugin for remote pair programming, allowing multiple developers to work together on the same project in real-time.
5 |
6 | How to Install
7 | --------------
8 |
9 | #### Via Package Control
10 |
11 | The easiest way to install is using [Sublime Package Control](https://sublime.wbond.net/).
12 |
13 | 1. Open the Command Palette using the menu item `Tools -> Command Palette...` (⇧⌘P on Mac)
14 | 2. Choose `Package Control: Install Package`
15 | 3. Find `RemoteCollab` and hit Enter
16 |
17 | #### Manual
18 |
19 | You can also install Remote Collab manually:
20 |
21 | 1. Download the .zip or .tar.gz archive
22 | 2. Unzip and rename the folder to `RemoteCollab`
23 | 3. Copy the folder into your `Packages` directory, which you can find using the menu item `Sublime Text -> Preferences -> Browse Packages...`
24 |
25 | How to Use
26 | ----------
27 |
28 | #### Host a session
29 |
30 | 1. Open the file you wish to collaboratively edit
31 | 2. Open the Command Palette using the menu item `Tools -> Command Palette...` (⇧⌘P on Mac)
32 | 3. Choose `Remote: Host Session`
33 | 4. You are now hosting a Remote Collab session. Give your IP address to the remote colleague you wish to collaborate with, and they can join your session.
34 |
35 | #### Join a session
36 |
37 | 1. Open the Command Palette using the menu item `Tools -> Command Palette...` (⇧⌘P on Mac)
38 | 2. Choose `Remote: Connect to Session`
39 | 3. Enter the IP address of the host whose session you wish to connect to.
40 | 4. You are now collaboratively editing a document with the host!
41 |
42 | Team Remote
43 | -----------
44 |
45 | Team Remote is Hawk Weisman (@hawkw), Dibyojyoti Mukherjee (@dibyom), Soukaina Hamimoune (@hamimounes), and Andreas Bach Landgrebe (@grebes15). We are students at Allegheny College.
46 |
--------------------------------------------------------------------------------
/out:
--------------------------------------------------------------------------------
1 | \documentclass[t,ignorenonframetext]{beamer}
2 | %\usepackage{beamerthemeJuanLesPins}%
3 | %\usepackage{beamercolorthemecougar}
4 | %\usepackage{beamerinnerthemecircles}
5 | \mode<presentation>
6 | {
7 | \usetheme{AnnArbor}
8 | \usecolortheme{default}
9 | %\usefonttheme{serif}
10 | }
11 |
12 | \usefonttheme{structuresmallcapsserif}
13 | \usepackage{enumerate,graphicx}
14 | \usepackage[english]{babel}
15 | % The line below is what I talked about that makes all
16 | % items in a list into overlays
17 | %\beamerdefaultoverlayspecification{<+->}
18 |
19 | \newcommand{\tc}[1]{$\backslash$\texttt{#1}}
20 |
21 | \title[GAs for energy efficient buildings]{Optimizing Energy Efficiency in Buildings using Parallel Multi-Objective Genetic Algorithms}
22 | \author{Dibyajyoti Mukherjee}
23 | \institute[]
24 | {
25 | Allegheny College \\
26 | \medskip
27 | {\emph{mukherjeed@allegheny.edu}}
28 | }
29 | \begin{document}
30 | \frame{
31 | \maketitle
32 | }
33 | \frame{
34 | \tableofcontents
35 | }
36 |
37 |
38 |
39 | \section[Introduction]{Introduction}
40 |
41 | \begin{frame}{Goals}
42 | \frametitle{Goals}
43 | \vspace*{.5in}
44 | \pause
45 | \begin{block}{Develop a genetic algorithm based system for optimizing energy efficiency and associated cost for building refurbishment}
46 | \end{block}
47 | \pause
48 | \begin{block}{Implement a fast evaluation strategy using master-slave distributed evaluation}
49 | \end{block}
50 | \pause
51 | \begin{block}{Changing this to something else}
52 | \end{block}
53 | \end{frame}
54 |
55 | \begin{frame}{Related Work}
56 | \frametitle{Related Work}
57 | \begin{table}[h]
58 | \caption{Overview of the five studies reviewed}
59 | \centering
60 | \begin{tabular}{|l|l|l|}
61 | \hline
62 | Author & Year & Study Focus \\ \hline
63 | Wang et al. & 2005 & \begin{tabular}[c]{@{}c@{}}Object oriented framework for building energy \\ usage optimization\end{tabular} \\ \hline
64 | Pernodet et al. & 2009 & GAs for building refurbishment optimization \\ \hline
65 | Magnier et al. & 2010 & \begin{tabular}[c]{@{}c@{}}Thermal comfort and energy usage optimization \\ using GA and neural networks\end{tabular} \\ \hline
66 | Pejicic et al. & 2012 & \begin{tabular}[c]{@{}c@{}}Optimal energy efficient building design \\ using GA and tabu search\end{tabular} \\ \hline
67 | Milajic et al. & 2013 & \begin{tabular}[c]{@{}c@{}}Methodology for green building design \\ using multi-objective GA\end{tabular} \\ \hline
68 | \end{tabular}
69 | \label{table:lit_review}
70 | \end{table}
71 | \end{frame}
72 |
73 | \section[Implementation]{Implementation}
74 |
75 | \begin{frame}{Building Variables}
76 | \frametitle{Building Variables}
77 | \begin{table}[h]
78 | \caption{HVAC System variables }
79 | \label{table:hvac}
80 | \centering
81 | \begin{tabular}{|l|l|l|}
82 | \hline
83 | Variable name & Type & Unit \\ \hline
84 | Window glazing Material & Discrete & N/A \\
85 | Insulation Material & Discrete & N/A \\
86 | Heating Set Point & Continuous & $^{\circ}$C \\
87 | Cooling Set Point & Continuous & $^{\circ}$C \\
88 | HVAC System Type & Discrete & N/A \\ \hline
89 | \end{tabular}
90 | \end{table}
91 | \end{frame}
92 |
93 | \begin{frame}{System Architecture}
94 | \frametitle{System Architecture}
95 | \begin{figure}[htbp]
96 | \centering
97 | \includegraphics[scale=0.27]{system.png}
98 | \caption{Distributed Evaluation Architecture}
99 | \label{fig:system}
100 | \end{figure}
101 | \end{frame}
102 |
103 | \section[Results]{Case Study Results}
104 |
105 | \begin{frame}{Case Study Results}
106 | \frametitle{Pareto Optimal Solutions}
107 | \begin{figure}[htbp]
108 | \centering
109 | \includegraphics[width =0.7\linewidth]{results_front.png}
110 | \caption{Pareto Optimal Front for case study run}
111 | \label{fig:results}
112 | \end{figure}
113 | \end{frame}
114 |
115 | \begin{frame}{Case Study Results}
116 | \frametitle{Pareto Optimal scenarios}
117 | \begin{table}[h]
118 | \centering
119 | \caption{Result scenarios from Pareto optimal set of solutions}
120 | \label{table:results}
121 | \begin{tabular}{|l|l|l|l|}
122 | \hline
123 | Scenario & Energy Used & Construction Cost & Energy Savings \\ \hline
124 | Base & 59087 kWh & N/A & N/A \\ \hline
125 | Least Energy & 49592 kWh & \$9931 & 16.07\% \\ \hline
126 | Least Cost & 57415 kWh & \$9145 & 2.83\% \\ \hline
127 | Middle & 49927 kWh & \$9648 & 15.50\% \\ \hline
128 | \end{tabular}
129 | \end{table}
130 | \end{frame}
131 |
132 | \begin{frame}{Results }
133 | \frametitle{Parallel Evaluation Analysis}
134 | \vspace*{.7in}
135 | \pause
136 | \begin{block}{$T = t \cdot (N \cdot p) / S$}
137 | \end{block}
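% Possible reading of the symbols (they are not defined on this slide):
% T = total evaluation time, t = time per building simulation, N = population
% size, p = number of generations, and S = number of parallel slave evaluators.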
138 | \pause
139 | \begin{block}{Total cost of the case study: \$1.05}
140 | \end{block}
141 | \pause
142 | \begin{block}{Slower than ANN. But better accuracy and no training period.}
143 | \end{block}
144 | \end{frame}
145 |
146 | \begin{frame}{Limitations}
147 | \frametitle{Limitations}
148 | \vspace*{.7in}
149 | \pause
150 | \begin{block}{Cost Approximations}
151 | \pause
152 | \end{block}
153 | \begin{block}{Unreliability of Distributed Systems}
154 | \pause
155 | \end{block}
156 | \begin{block}{Building Modelling File Format}
157 | \pause
158 | \end{block}
159 | \end{frame}
160 |
161 | \section[Future Work]{Future Work}
162 | \begin{frame}{Future Work}
163 | \frametitle{Future Work}
164 | \vspace*{1.0in}
165 | \pause
166 | \begin{block}{Cost Amortization}
167 | \pause
168 | \end{block}
169 | \begin{block}{Also changing hi}
170 | \pause
171 | \end{block}
172 | \end{frame}
173 |
174 |
175 | \end{document}
176 |
177 |
--------------------------------------------------------------------------------
/remote-sublime.sublime-project:
--------------------------------------------------------------------------------
1 | {
2 | "folders":
3 | [
4 | {
5 | "name": "Source",
6 | "follow_symlinks": true,
7 | "path": "./",
8 | "folder_exclude_patterns": ["tests", "messages"],
9 | },
10 | {
11 | "name": "Tests",
12 | "follow_symlinks": true,
13 | "path": "./tests/"
14 | },
15 | {
16 | "name": "Update Messages",
17 | "follow_symlinks": true,
18 | "path": "./messages/"
19 | },
20 | ],
21 | "settings":
22 | {
23 | "tab_size": 4,
24 | "translate_tabs_to_spaces": true,
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/remote.py:
--------------------------------------------------------------------------------
1 | from . import Session
2 | import sublime, sublime_plugin
3 | import socket
4 | import sys
5 | import threading
6 |
7 | session = None
8 |
9 | class DiffListener(sublime_plugin.EventListener):
10 | """
11 | Listens for modifications to the view and gets the diffs using
12 | Operational Transformation
13 | """
14 |
15 | def on_modified_async(self, view):
16 | """Listens for modifications to views which are part of a currently
17 | active remote session."""
18 | global session
19 | if session is not None:
20 | if session.view == view:
21 | current_buffer = view.substr(sublime.Region(0, view.size()))
22 | print("diff")
23 | session.send_diffs(current_buffer)
24 |
25 | def on_close(self, view):
26 | """Check to see if views I care about are closed, and if they are,
27 | drop them from my watched-views"""
28 | global session
29 | if session is not None:
30 | if session.view == view:
31 | session.close()
32 | session = None
33 |
34 | class StartSessionCommand(sublime_plugin.TextCommand):
35 | """Command to start a new RemoteCollab session for the current view"""
36 | get_buffer = lambda view: view.substr(sublime.Region(0, view.size()))
37 |
38 | def run(self, edit):
39 | global session
40 | session = Session.Session(self.view)
41 |         print("[RemoteCollab] Started hosting session")
42 |
43 | class ConnectToSessionCommand(sublime_plugin.WindowCommand):
44 | """Command to connect to an external RemoteCollab session."""
45 |
46 | def run(self):
47 | """
48 | Show the input panel to get an IP address for the remote host.
49 | """
50 | self.window.show_input_panel(
51 | 'Session IP Address',
52 | '',
53 | self.on_done,
54 | self.on_change,
55 | self.on_cancel)
56 |
57 | def on_change(self, input):
58 | pass
59 |
60 | def on_cancel(self):
61 | pass
62 |
63 | def on_done(self, input):
64 | """
65 | Input panel handler - creates a new session connected to the given IP address.
66 | """
67 | global session
68 | session = Session.Session(self.window.new_file(), host=input)
69 |
70 | class UpdateBufferCommand(sublime_plugin.TextCommand):
71 | """
72 | Command to create an Edit object and update the buffer.
73 | """
74 | def run(self, edit, new_buffer):
75 | self.view.replace(edit, sublime.Region(0, self.view.size()), new_buffer)
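    # Usage sketch: code that is not itself a TextCommand (for example the
    # Session receive thread, which has no Edit object of its own) would apply
    # an incoming buffer with something like
    #   view.run_command("update_buffer", {"new_buffer": remote_text})
    # where "update_buffer" is the snake_case name Sublime derives from
    # UpdateBufferCommand and 'remote_text' is the hypothetical received string.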
76 |
77 | class DisconnectSessionCommand(sublime_plugin.ApplicationCommand):
78 | """Command to close a RemoteCollab session."""
79 |
80 | def run(self):
81 | global session
82 |         if session is not None: session.close()
83 | session = None
84 |
85 |
--------------------------------------------------------------------------------
/remote.sublime-commands:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "caption": "Remote: Connect to Session",
4 | "command": "connect_to_session"
5 | },
6 | {
7 | "caption": "Remote: Host Session",
8 | "command": "start_session"
9 | },
10 | {
11 | "caption": "Remote: Disconnect from Session",
12 | "command": "disconnect_session"
13 | },
14 | ]
--------------------------------------------------------------------------------
/tests/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TeamRemote/remote-sublime/515cb427579cd9c02d035b9622906fc7de5446ef/tests/.DS_Store
--------------------------------------------------------------------------------
/tests/test.py:
--------------------------------------------------------------------------------
1 | import sublime, sublime_plugin
2 | from unittest import TestCase
3 |
4 | version = sublime.version()
5 |
6 | class TestDiffListener(TestCase):
7 |
8 | def setUp(self):
9 | self.view = sublime.active_window().new_file()
10 |
11 | def tearDown(self):
12 | if self.view:
13 | self.view.set_scratch(True)
14 | self.view.window().run_command("close_file")
15 |
16 | def testOnModify(self):
17 | # put actual test here
18 |
19 | pass
20 |
21 | def testOnClose(self):
22 | # insert test here
23 | pass
24 |
25 |
--------------------------------------------------------------------------------
/travis.sh:
--------------------------------------------------------------------------------
1 | #! /bin/bash
2 | export SUBLIME_TEXT_VERSION=$1
3 | export PACKAGE="$2"
4 | export STP=$HOME/.config/sublime-text-$SUBLIME_TEXT_VERSION/Packages
5 |
6 | if [ -z "$(which subl)" ]; then
7 | if [ $SUBLIME_TEXT_VERSION -eq 2 ]; then
8 | echo installing sublime 2
9 | sudo add-apt-repository ppa:webupd8team/sublime-text-2 -y
10 | sudo apt-get update
11 | sudo apt-get install sublime-text -y
12 | elif [ $SUBLIME_TEXT_VERSION -eq 3 ]; then
13 | echo installing sublime 3
14 | sudo add-apt-repository ppa:webupd8team/sublime-text-3 -y
15 | sudo apt-get update
16 | sudo apt-get install sublime-text-installer -y
17 | fi
18 | fi
19 |
20 | if [ ! -d $STP ]; then
21 | echo creating sublime package directory
22 | mkdir -p $STP
23 | fi
24 |
25 | if [ ! -d $STP/$PACKAGE ]; then
26 | echo symlink the package to sublime package directory
27 | ln -s $PWD $STP/$PACKAGE
28 | fi
29 |
30 | if [ ! -d $STP/UnitTesting ]; then
31 | echo download latest UnitTesting release
32 | # for stability, you may consider a fixed version of UnitTesting, eg TAG=0.1.4
33 | TAG=`git ls-remote --tags https://github.com/randy3k/UnitTesting | sed 's|.*/\([^/]*$\)|\1|' | sort -r | head -1`
34 | git clone --branch $TAG https://github.com/randy3k/UnitTesting $STP/UnitTesting
35 | fi
36 |
--------------------------------------------------------------------------------