├── .gitignore
├── LICENSE
├── README.md
├── docker-compose.yml
├── image.png
├── migrations
│   ├── 1608616875404_base.js
│   ├── 1609390783696_apartment-length.js
│   ├── 1609650077081_request-slack-ts.js
│   ├── 1609701811032_node-memberships.js
│   ├── 1612754047978_request-installed.js
│   ├── 1612760719465_stripe.js
│   └── 1613282969996_request-tokens.js
├── netlify.toml
├── package.json
├── public
│   └── _redirects
├── scripts
│   ├── ProgressBar.js
│   ├── db.js
│   ├── fix-bin.js
│   ├── gml_to_pgsql.py
│   ├── import-spreadsheet.js
│   ├── import-stripe.js
│   ├── los.js
│   └── reset-los-db.js
├── src
│   ├── auth
│   │   └── index.js
│   ├── db
│   │   ├── appointments.js
│   │   ├── buildings.js
│   │   ├── devices.js
│   │   ├── index.js
│   │   ├── links.js
│   │   ├── los.js
│   │   ├── map.js
│   │   ├── members.js
│   │   ├── memberships.js
│   │   ├── nodes.js
│   │   ├── panos.js
│   │   ├── requests.js
│   │   └── search.js
│   ├── geojson
│   │   ├── links.js
│   │   └── nodes.js
│   ├── kml
│   │   ├── appointments.js
│   │   ├── index.js
│   │   ├── los.js
│   │   ├── nodes.js
│   │   ├── requests.js
│   │   └── utils.js
│   ├── routes
│   │   ├── appointments.js
│   │   ├── buildings.js
│   │   ├── device_types.js
│   │   ├── devices.js
│   │   ├── geojson.js
│   │   ├── kml.js
│   │   ├── links.js
│   │   ├── los.js
│   │   ├── map.js
│   │   ├── members.js
│   │   ├── memberships.js
│   │   ├── nodes.js
│   │   ├── panos.js
│   │   ├── requests.js
│   │   ├── search.js
│   │   └── webhooks.js
│   ├── slack
│   │   ├── client.js
│   │   ├── index.js
│   │   └── index.spec.js
│   ├── v1.js
│   └── webhooks
│       └── acuity.js
├── webpack.functions.js
└── yarn.lock
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules/
2 | functions/
3 | building_data/
4 | .env
5 | .DS_Store
6 | .netlify
7 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU AFFERO GENERAL PUBLIC LICENSE
2 | Version 3, 19 November 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU Affero General Public License is a free, copyleft license for
11 | software and other kinds of works, specifically designed to ensure
12 | cooperation with the community in the case of network server software.
13 |
14 | The licenses for most software and other practical works are designed
15 | to take away your freedom to share and change the works. By contrast,
16 | our General Public Licenses are intended to guarantee your freedom to
17 | share and change all versions of a program--to make sure it remains free
18 | software for all its users.
19 |
20 | When we speak of free software, we are referring to freedom, not
21 | price. Our General Public Licenses are designed to make sure that you
22 | have the freedom to distribute copies of free software (and charge for
23 | them if you wish), that you receive source code or can get it if you
24 | want it, that you can change the software or use pieces of it in new
25 | free programs, and that you know you can do these things.
26 |
27 | Developers that use our General Public Licenses protect your rights
28 | with two steps: (1) assert copyright on the software, and (2) offer
29 | you this License which gives you legal permission to copy, distribute
30 | and/or modify the software.
31 |
32 | A secondary benefit of defending all users' freedom is that
33 | improvements made in alternate versions of the program, if they
34 | receive widespread use, become available for other developers to
35 | incorporate. Many developers of free software are heartened and
36 | encouraged by the resulting cooperation. However, in the case of
37 | software used on network servers, this result may fail to come about.
38 | The GNU General Public License permits making a modified version and
39 | letting the public access it on a server without ever releasing its
40 | source code to the public.
41 |
42 | The GNU Affero General Public License is designed specifically to
43 | ensure that, in such cases, the modified source code becomes available
44 | to the community. It requires the operator of a network server to
45 | provide the source code of the modified version running there to the
46 | users of that server. Therefore, public use of a modified version, on
47 | a publicly accessible server, gives the public access to the source
48 | code of the modified version.
49 |
50 | An older license, called the Affero General Public License and
51 | published by Affero, was designed to accomplish similar goals. This is
52 | a different license, not a version of the Affero GPL, but Affero has
53 | released a new version of the Affero GPL which permits relicensing under
54 | this license.
55 |
56 | The precise terms and conditions for copying, distribution and
57 | modification follow.
58 |
59 | TERMS AND CONDITIONS
60 |
61 | 0. Definitions.
62 |
63 | "This License" refers to version 3 of the GNU Affero General Public License.
64 |
65 | "Copyright" also means copyright-like laws that apply to other kinds of
66 | works, such as semiconductor masks.
67 |
68 | "The Program" refers to any copyrightable work licensed under this
69 | License. Each licensee is addressed as "you". "Licensees" and
70 | "recipients" may be individuals or organizations.
71 |
72 | To "modify" a work means to copy from or adapt all or part of the work
73 | in a fashion requiring copyright permission, other than the making of an
74 | exact copy. The resulting work is called a "modified version" of the
75 | earlier work or a work "based on" the earlier work.
76 |
77 | A "covered work" means either the unmodified Program or a work based
78 | on the Program.
79 |
80 | To "propagate" a work means to do anything with it that, without
81 | permission, would make you directly or secondarily liable for
82 | infringement under applicable copyright law, except executing it on a
83 | computer or modifying a private copy. Propagation includes copying,
84 | distribution (with or without modification), making available to the
85 | public, and in some countries other activities as well.
86 |
87 | To "convey" a work means any kind of propagation that enables other
88 | parties to make or receive copies. Mere interaction with a user through
89 | a computer network, with no transfer of a copy, is not conveying.
90 |
91 | An interactive user interface displays "Appropriate Legal Notices"
92 | to the extent that it includes a convenient and prominently visible
93 | feature that (1) displays an appropriate copyright notice, and (2)
94 | tells the user that there is no warranty for the work (except to the
95 | extent that warranties are provided), that licensees may convey the
96 | work under this License, and how to view a copy of this License. If
97 | the interface presents a list of user commands or options, such as a
98 | menu, a prominent item in the list meets this criterion.
99 |
100 | 1. Source Code.
101 |
102 | The "source code" for a work means the preferred form of the work
103 | for making modifications to it. "Object code" means any non-source
104 | form of a work.
105 |
106 | A "Standard Interface" means an interface that either is an official
107 | standard defined by a recognized standards body, or, in the case of
108 | interfaces specified for a particular programming language, one that
109 | is widely used among developers working in that language.
110 |
111 | The "System Libraries" of an executable work include anything, other
112 | than the work as a whole, that (a) is included in the normal form of
113 | packaging a Major Component, but which is not part of that Major
114 | Component, and (b) serves only to enable use of the work with that
115 | Major Component, or to implement a Standard Interface for which an
116 | implementation is available to the public in source code form. A
117 | "Major Component", in this context, means a major essential component
118 | (kernel, window system, and so on) of the specific operating system
119 | (if any) on which the executable work runs, or a compiler used to
120 | produce the work, or an object code interpreter used to run it.
121 |
122 | The "Corresponding Source" for a work in object code form means all
123 | the source code needed to generate, install, and (for an executable
124 | work) run the object code and to modify the work, including scripts to
125 | control those activities. However, it does not include the work's
126 | System Libraries, or general-purpose tools or generally available free
127 | programs which are used unmodified in performing those activities but
128 | which are not part of the work. For example, Corresponding Source
129 | includes interface definition files associated with source files for
130 | the work, and the source code for shared libraries and dynamically
131 | linked subprograms that the work is specifically designed to require,
132 | such as by intimate data communication or control flow between those
133 | subprograms and other parts of the work.
134 |
135 | The Corresponding Source need not include anything that users
136 | can regenerate automatically from other parts of the Corresponding
137 | Source.
138 |
139 | The Corresponding Source for a work in source code form is that
140 | same work.
141 |
142 | 2. Basic Permissions.
143 |
144 | All rights granted under this License are granted for the term of
145 | copyright on the Program, and are irrevocable provided the stated
146 | conditions are met. This License explicitly affirms your unlimited
147 | permission to run the unmodified Program. The output from running a
148 | covered work is covered by this License only if the output, given its
149 | content, constitutes a covered work. This License acknowledges your
150 | rights of fair use or other equivalent, as provided by copyright law.
151 |
152 | You may make, run and propagate covered works that you do not
153 | convey, without conditions so long as your license otherwise remains
154 | in force. You may convey covered works to others for the sole purpose
155 | of having them make modifications exclusively for you, or provide you
156 | with facilities for running those works, provided that you comply with
157 | the terms of this License in conveying all material for which you do
158 | not control copyright. Those thus making or running the covered works
159 | for you must do so exclusively on your behalf, under your direction
160 | and control, on terms that prohibit them from making any copies of
161 | your copyrighted material outside their relationship with you.
162 |
163 | Conveying under any other circumstances is permitted solely under
164 | the conditions stated below. Sublicensing is not allowed; section 10
165 | makes it unnecessary.
166 |
167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
168 |
169 | No covered work shall be deemed part of an effective technological
170 | measure under any applicable law fulfilling obligations under article
171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
172 | similar laws prohibiting or restricting circumvention of such
173 | measures.
174 |
175 | When you convey a covered work, you waive any legal power to forbid
176 | circumvention of technological measures to the extent such circumvention
177 | is effected by exercising rights under this License with respect to
178 | the covered work, and you disclaim any intention to limit operation or
179 | modification of the work as a means of enforcing, against the work's
180 | users, your or third parties' legal rights to forbid circumvention of
181 | technological measures.
182 |
183 | 4. Conveying Verbatim Copies.
184 |
185 | You may convey verbatim copies of the Program's source code as you
186 | receive it, in any medium, provided that you conspicuously and
187 | appropriately publish on each copy an appropriate copyright notice;
188 | keep intact all notices stating that this License and any
189 | non-permissive terms added in accord with section 7 apply to the code;
190 | keep intact all notices of the absence of any warranty; and give all
191 | recipients a copy of this License along with the Program.
192 |
193 | You may charge any price or no price for each copy that you convey,
194 | and you may offer support or warranty protection for a fee.
195 |
196 | 5. Conveying Modified Source Versions.
197 |
198 | You may convey a work based on the Program, or the modifications to
199 | produce it from the Program, in the form of source code under the
200 | terms of section 4, provided that you also meet all of these conditions:
201 |
202 | a) The work must carry prominent notices stating that you modified
203 | it, and giving a relevant date.
204 |
205 | b) The work must carry prominent notices stating that it is
206 | released under this License and any conditions added under section
207 | 7. This requirement modifies the requirement in section 4 to
208 | "keep intact all notices".
209 |
210 | c) You must license the entire work, as a whole, under this
211 | License to anyone who comes into possession of a copy. This
212 | License will therefore apply, along with any applicable section 7
213 | additional terms, to the whole of the work, and all its parts,
214 | regardless of how they are packaged. This License gives no
215 | permission to license the work in any other way, but it does not
216 | invalidate such permission if you have separately received it.
217 |
218 | d) If the work has interactive user interfaces, each must display
219 | Appropriate Legal Notices; however, if the Program has interactive
220 | interfaces that do not display Appropriate Legal Notices, your
221 | work need not make them do so.
222 |
223 | A compilation of a covered work with other separate and independent
224 | works, which are not by their nature extensions of the covered work,
225 | and which are not combined with it such as to form a larger program,
226 | in or on a volume of a storage or distribution medium, is called an
227 | "aggregate" if the compilation and its resulting copyright are not
228 | used to limit the access or legal rights of the compilation's users
229 | beyond what the individual works permit. Inclusion of a covered work
230 | in an aggregate does not cause this License to apply to the other
231 | parts of the aggregate.
232 |
233 | 6. Conveying Non-Source Forms.
234 |
235 | You may convey a covered work in object code form under the terms
236 | of sections 4 and 5, provided that you also convey the
237 | machine-readable Corresponding Source under the terms of this License,
238 | in one of these ways:
239 |
240 | a) Convey the object code in, or embodied in, a physical product
241 | (including a physical distribution medium), accompanied by the
242 | Corresponding Source fixed on a durable physical medium
243 | customarily used for software interchange.
244 |
245 | b) Convey the object code in, or embodied in, a physical product
246 | (including a physical distribution medium), accompanied by a
247 | written offer, valid for at least three years and valid for as
248 | long as you offer spare parts or customer support for that product
249 | model, to give anyone who possesses the object code either (1) a
250 | copy of the Corresponding Source for all the software in the
251 | product that is covered by this License, on a durable physical
252 | medium customarily used for software interchange, for a price no
253 | more than your reasonable cost of physically performing this
254 | conveying of source, or (2) access to copy the
255 | Corresponding Source from a network server at no charge.
256 |
257 | c) Convey individual copies of the object code with a copy of the
258 | written offer to provide the Corresponding Source. This
259 | alternative is allowed only occasionally and noncommercially, and
260 | only if you received the object code with such an offer, in accord
261 | with subsection 6b.
262 |
263 | d) Convey the object code by offering access from a designated
264 | place (gratis or for a charge), and offer equivalent access to the
265 | Corresponding Source in the same way through the same place at no
266 | further charge. You need not require recipients to copy the
267 | Corresponding Source along with the object code. If the place to
268 | copy the object code is a network server, the Corresponding Source
269 | may be on a different server (operated by you or a third party)
270 | that supports equivalent copying facilities, provided you maintain
271 | clear directions next to the object code saying where to find the
272 | Corresponding Source. Regardless of what server hosts the
273 | Corresponding Source, you remain obligated to ensure that it is
274 | available for as long as needed to satisfy these requirements.
275 |
276 | e) Convey the object code using peer-to-peer transmission, provided
277 | you inform other peers where the object code and Corresponding
278 | Source of the work are being offered to the general public at no
279 | charge under subsection 6d.
280 |
281 | A separable portion of the object code, whose source code is excluded
282 | from the Corresponding Source as a System Library, need not be
283 | included in conveying the object code work.
284 |
285 | A "User Product" is either (1) a "consumer product", which means any
286 | tangible personal property which is normally used for personal, family,
287 | or household purposes, or (2) anything designed or sold for incorporation
288 | into a dwelling. In determining whether a product is a consumer product,
289 | doubtful cases shall be resolved in favor of coverage. For a particular
290 | product received by a particular user, "normally used" refers to a
291 | typical or common use of that class of product, regardless of the status
292 | of the particular user or of the way in which the particular user
293 | actually uses, or expects or is expected to use, the product. A product
294 | is a consumer product regardless of whether the product has substantial
295 | commercial, industrial or non-consumer uses, unless such uses represent
296 | the only significant mode of use of the product.
297 |
298 | "Installation Information" for a User Product means any methods,
299 | procedures, authorization keys, or other information required to install
300 | and execute modified versions of a covered work in that User Product from
301 | a modified version of its Corresponding Source. The information must
302 | suffice to ensure that the continued functioning of the modified object
303 | code is in no case prevented or interfered with solely because
304 | modification has been made.
305 |
306 | If you convey an object code work under this section in, or with, or
307 | specifically for use in, a User Product, and the conveying occurs as
308 | part of a transaction in which the right of possession and use of the
309 | User Product is transferred to the recipient in perpetuity or for a
310 | fixed term (regardless of how the transaction is characterized), the
311 | Corresponding Source conveyed under this section must be accompanied
312 | by the Installation Information. But this requirement does not apply
313 | if neither you nor any third party retains the ability to install
314 | modified object code on the User Product (for example, the work has
315 | been installed in ROM).
316 |
317 | The requirement to provide Installation Information does not include a
318 | requirement to continue to provide support service, warranty, or updates
319 | for a work that has been modified or installed by the recipient, or for
320 | the User Product in which it has been modified or installed. Access to a
321 | network may be denied when the modification itself materially and
322 | adversely affects the operation of the network or violates the rules and
323 | protocols for communication across the network.
324 |
325 | Corresponding Source conveyed, and Installation Information provided,
326 | in accord with this section must be in a format that is publicly
327 | documented (and with an implementation available to the public in
328 | source code form), and must require no special password or key for
329 | unpacking, reading or copying.
330 |
331 | 7. Additional Terms.
332 |
333 | "Additional permissions" are terms that supplement the terms of this
334 | License by making exceptions from one or more of its conditions.
335 | Additional permissions that are applicable to the entire Program shall
336 | be treated as though they were included in this License, to the extent
337 | that they are valid under applicable law. If additional permissions
338 | apply only to part of the Program, that part may be used separately
339 | under those permissions, but the entire Program remains governed by
340 | this License without regard to the additional permissions.
341 |
342 | When you convey a copy of a covered work, you may at your option
343 | remove any additional permissions from that copy, or from any part of
344 | it. (Additional permissions may be written to require their own
345 | removal in certain cases when you modify the work.) You may place
346 | additional permissions on material, added by you to a covered work,
347 | for which you have or can give appropriate copyright permission.
348 |
349 | Notwithstanding any other provision of this License, for material you
350 | add to a covered work, you may (if authorized by the copyright holders of
351 | that material) supplement the terms of this License with terms:
352 |
353 | a) Disclaiming warranty or limiting liability differently from the
354 | terms of sections 15 and 16 of this License; or
355 |
356 | b) Requiring preservation of specified reasonable legal notices or
357 | author attributions in that material or in the Appropriate Legal
358 | Notices displayed by works containing it; or
359 |
360 | c) Prohibiting misrepresentation of the origin of that material, or
361 | requiring that modified versions of such material be marked in
362 | reasonable ways as different from the original version; or
363 |
364 | d) Limiting the use for publicity purposes of names of licensors or
365 | authors of the material; or
366 |
367 | e) Declining to grant rights under trademark law for use of some
368 | trade names, trademarks, or service marks; or
369 |
370 | f) Requiring indemnification of licensors and authors of that
371 | material by anyone who conveys the material (or modified versions of
372 | it) with contractual assumptions of liability to the recipient, for
373 | any liability that these contractual assumptions directly impose on
374 | those licensors and authors.
375 |
376 | All other non-permissive additional terms are considered "further
377 | restrictions" within the meaning of section 10. If the Program as you
378 | received it, or any part of it, contains a notice stating that it is
379 | governed by this License along with a term that is a further
380 | restriction, you may remove that term. If a license document contains
381 | a further restriction but permits relicensing or conveying under this
382 | License, you may add to a covered work material governed by the terms
383 | of that license document, provided that the further restriction does
384 | not survive such relicensing or conveying.
385 |
386 | If you add terms to a covered work in accord with this section, you
387 | must place, in the relevant source files, a statement of the
388 | additional terms that apply to those files, or a notice indicating
389 | where to find the applicable terms.
390 |
391 | Additional terms, permissive or non-permissive, may be stated in the
392 | form of a separately written license, or stated as exceptions;
393 | the above requirements apply either way.
394 |
395 | 8. Termination.
396 |
397 | You may not propagate or modify a covered work except as expressly
398 | provided under this License. Any attempt otherwise to propagate or
399 | modify it is void, and will automatically terminate your rights under
400 | this License (including any patent licenses granted under the third
401 | paragraph of section 11).
402 |
403 | However, if you cease all violation of this License, then your
404 | license from a particular copyright holder is reinstated (a)
405 | provisionally, unless and until the copyright holder explicitly and
406 | finally terminates your license, and (b) permanently, if the copyright
407 | holder fails to notify you of the violation by some reasonable means
408 | prior to 60 days after the cessation.
409 |
410 | Moreover, your license from a particular copyright holder is
411 | reinstated permanently if the copyright holder notifies you of the
412 | violation by some reasonable means, this is the first time you have
413 | received notice of violation of this License (for any work) from that
414 | copyright holder, and you cure the violation prior to 30 days after
415 | your receipt of the notice.
416 |
417 | Termination of your rights under this section does not terminate the
418 | licenses of parties who have received copies or rights from you under
419 | this License. If your rights have been terminated and not permanently
420 | reinstated, you do not qualify to receive new licenses for the same
421 | material under section 10.
422 |
423 | 9. Acceptance Not Required for Having Copies.
424 |
425 | You are not required to accept this License in order to receive or
426 | run a copy of the Program. Ancillary propagation of a covered work
427 | occurring solely as a consequence of using peer-to-peer transmission
428 | to receive a copy likewise does not require acceptance. However,
429 | nothing other than this License grants you permission to propagate or
430 | modify any covered work. These actions infringe copyright if you do
431 | not accept this License. Therefore, by modifying or propagating a
432 | covered work, you indicate your acceptance of this License to do so.
433 |
434 | 10. Automatic Licensing of Downstream Recipients.
435 |
436 | Each time you convey a covered work, the recipient automatically
437 | receives a license from the original licensors, to run, modify and
438 | propagate that work, subject to this License. You are not responsible
439 | for enforcing compliance by third parties with this License.
440 |
441 | An "entity transaction" is a transaction transferring control of an
442 | organization, or substantially all assets of one, or subdividing an
443 | organization, or merging organizations. If propagation of a covered
444 | work results from an entity transaction, each party to that
445 | transaction who receives a copy of the work also receives whatever
446 | licenses to the work the party's predecessor in interest had or could
447 | give under the previous paragraph, plus a right to possession of the
448 | Corresponding Source of the work from the predecessor in interest, if
449 | the predecessor has it or can get it with reasonable efforts.
450 |
451 | You may not impose any further restrictions on the exercise of the
452 | rights granted or affirmed under this License. For example, you may
453 | not impose a license fee, royalty, or other charge for exercise of
454 | rights granted under this License, and you may not initiate litigation
455 | (including a cross-claim or counterclaim in a lawsuit) alleging that
456 | any patent claim is infringed by making, using, selling, offering for
457 | sale, or importing the Program or any portion of it.
458 |
459 | 11. Patents.
460 |
461 | A "contributor" is a copyright holder who authorizes use under this
462 | License of the Program or a work on which the Program is based. The
463 | work thus licensed is called the contributor's "contributor version".
464 |
465 | A contributor's "essential patent claims" are all patent claims
466 | owned or controlled by the contributor, whether already acquired or
467 | hereafter acquired, that would be infringed by some manner, permitted
468 | by this License, of making, using, or selling its contributor version,
469 | but do not include claims that would be infringed only as a
470 | consequence of further modification of the contributor version. For
471 | purposes of this definition, "control" includes the right to grant
472 | patent sublicenses in a manner consistent with the requirements of
473 | this License.
474 |
475 | Each contributor grants you a non-exclusive, worldwide, royalty-free
476 | patent license under the contributor's essential patent claims, to
477 | make, use, sell, offer for sale, import and otherwise run, modify and
478 | propagate the contents of its contributor version.
479 |
480 | In the following three paragraphs, a "patent license" is any express
481 | agreement or commitment, however denominated, not to enforce a patent
482 | (such as an express permission to practice a patent or covenant not to
483 | sue for patent infringement). To "grant" such a patent license to a
484 | party means to make such an agreement or commitment not to enforce a
485 | patent against the party.
486 |
487 | If you convey a covered work, knowingly relying on a patent license,
488 | and the Corresponding Source of the work is not available for anyone
489 | to copy, free of charge and under the terms of this License, through a
490 | publicly available network server or other readily accessible means,
491 | then you must either (1) cause the Corresponding Source to be so
492 | available, or (2) arrange to deprive yourself of the benefit of the
493 | patent license for this particular work, or (3) arrange, in a manner
494 | consistent with the requirements of this License, to extend the patent
495 | license to downstream recipients. "Knowingly relying" means you have
496 | actual knowledge that, but for the patent license, your conveying the
497 | covered work in a country, or your recipient's use of the covered work
498 | in a country, would infringe one or more identifiable patents in that
499 | country that you have reason to believe are valid.
500 |
501 | If, pursuant to or in connection with a single transaction or
502 | arrangement, you convey, or propagate by procuring conveyance of, a
503 | covered work, and grant a patent license to some of the parties
504 | receiving the covered work authorizing them to use, propagate, modify
505 | or convey a specific copy of the covered work, then the patent license
506 | you grant is automatically extended to all recipients of the covered
507 | work and works based on it.
508 |
509 | A patent license is "discriminatory" if it does not include within
510 | the scope of its coverage, prohibits the exercise of, or is
511 | conditioned on the non-exercise of one or more of the rights that are
512 | specifically granted under this License. You may not convey a covered
513 | work if you are a party to an arrangement with a third party that is
514 | in the business of distributing software, under which you make payment
515 | to the third party based on the extent of your activity of conveying
516 | the work, and under which the third party grants, to any of the
517 | parties who would receive the covered work from you, a discriminatory
518 | patent license (a) in connection with copies of the covered work
519 | conveyed by you (or copies made from those copies), or (b) primarily
520 | for and in connection with specific products or compilations that
521 | contain the covered work, unless you entered into that arrangement,
522 | or that patent license was granted, prior to 28 March 2007.
523 |
524 | Nothing in this License shall be construed as excluding or limiting
525 | any implied license or other defenses to infringement that may
526 | otherwise be available to you under applicable patent law.
527 |
528 | 12. No Surrender of Others' Freedom.
529 |
530 | If conditions are imposed on you (whether by court order, agreement or
531 | otherwise) that contradict the conditions of this License, they do not
532 | excuse you from the conditions of this License. If you cannot convey a
533 | covered work so as to satisfy simultaneously your obligations under this
534 | License and any other pertinent obligations, then as a consequence you may
535 | not convey it at all. For example, if you agree to terms that obligate you
536 | to collect a royalty for further conveying from those to whom you convey
537 | the Program, the only way you could satisfy both those terms and this
538 | License would be to refrain entirely from conveying the Program.
539 |
540 | 13. Remote Network Interaction; Use with the GNU General Public License.
541 |
542 | Notwithstanding any other provision of this License, if you modify the
543 | Program, your modified version must prominently offer all users
544 | interacting with it remotely through a computer network (if your version
545 | supports such interaction) an opportunity to receive the Corresponding
546 | Source of your version by providing access to the Corresponding Source
547 | from a network server at no charge, through some standard or customary
548 | means of facilitating copying of software. This Corresponding Source
549 | shall include the Corresponding Source for any work covered by version 3
550 | of the GNU General Public License that is incorporated pursuant to the
551 | following paragraph.
552 |
553 | Notwithstanding any other provision of this License, you have
554 | permission to link or combine any covered work with a work licensed
555 | under version 3 of the GNU General Public License into a single
556 | combined work, and to convey the resulting work. The terms of this
557 | License will continue to apply to the part which is the covered work,
558 | but the work with which it is combined will remain governed by version
559 | 3 of the GNU General Public License.
560 |
561 | 14. Revised Versions of this License.
562 |
563 | The Free Software Foundation may publish revised and/or new versions of
564 | the GNU Affero General Public License from time to time. Such new versions
565 | will be similar in spirit to the present version, but may differ in detail to
566 | address new problems or concerns.
567 |
568 | Each version is given a distinguishing version number. If the
569 | Program specifies that a certain numbered version of the GNU Affero General
570 | Public License "or any later version" applies to it, you have the
571 | option of following the terms and conditions either of that numbered
572 | version or of any later version published by the Free Software
573 | Foundation. If the Program does not specify a version number of the
574 | GNU Affero General Public License, you may choose any version ever published
575 | by the Free Software Foundation.
576 |
577 | If the Program specifies that a proxy can decide which future
578 | versions of the GNU Affero General Public License can be used, that proxy's
579 | public statement of acceptance of a version permanently authorizes you
580 | to choose that version for the Program.
581 |
582 | Later license versions may give you additional or different
583 | permissions. However, no additional obligations are imposed on any
584 | author or copyright holder as a result of your choosing to follow a
585 | later version.
586 |
587 | 15. Disclaimer of Warranty.
588 |
589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
597 |
598 | 16. Limitation of Liability.
599 |
600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
608 | SUCH DAMAGES.
609 |
610 | 17. Interpretation of Sections 15 and 16.
611 |
612 | If the disclaimer of warranty and limitation of liability provided
613 | above cannot be given local legal effect according to their terms,
614 | reviewing courts shall apply local law that most closely approximates
615 | an absolute waiver of all civil liability in connection with the
616 | Program, unless a warranty or assumption of liability accompanies a
617 | copy of the Program in return for a fee.
618 |
619 | END OF TERMS AND CONDITIONS
620 |
621 | How to Apply These Terms to Your New Programs
622 |
623 | If you develop a new program, and you want it to be of the greatest
624 | possible use to the public, the best way to achieve this is to make it
625 | free software which everyone can redistribute and change under these terms.
626 |
627 | To do so, attach the following notices to the program. It is safest
628 | to attach them to the start of each source file to most effectively
629 | state the exclusion of warranty; and each file should have at least
630 | the "copyright" line and a pointer to where the full notice is found.
631 |
632 | <one line to give the program's name and a brief idea of what it does.>
633 | Copyright (C) <year>  <name of author>
634 |
635 | This program is free software: you can redistribute it and/or modify
636 | it under the terms of the GNU Affero General Public License as published
637 | by the Free Software Foundation, either version 3 of the License, or
638 | (at your option) any later version.
639 |
640 | This program is distributed in the hope that it will be useful,
641 | but WITHOUT ANY WARRANTY; without even the implied warranty of
642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
643 | GNU Affero General Public License for more details.
644 |
645 | You should have received a copy of the GNU Affero General Public License
646 | along with this program. If not, see <https://www.gnu.org/licenses/>.
647 |
648 | Also add information on how to contact you by electronic and paper mail.
649 |
650 | If your software can interact with users remotely through a computer
651 | network, you should also make sure that it provides a way for users to
652 | get its source. For example, if your program is a web application, its
653 | interface could display a "Source" link that leads users to an archive
654 | of the code. There are many ways you could offer source, and different
655 | solutions will be better for different programs; see section 13 for the
656 | specific requirements.
657 |
658 | You should also get your employer (if you work as a programmer) or school,
659 | if any, to sign a "copyright disclaimer" for the program, if necessary.
660 | For more information on this, and how to apply and follow the GNU AGPL, see
661 | <https://www.gnu.org/licenses/>.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Mesh API
2 |
3 | ![](./image.png)
4 |
5 | 🚧 Work in progress!
6 |
7 | ## Contributing
8 |
9 | Before committing code, please run `yarn precommit` to format your code and run the tests. Only commit your code when it's formatted and the tests pass. You can add it as a git [precommit hook](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) if you like.
10 |
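For convenience, the check can also run automatically on every commit. A minimal pre-commit hook sketch (an illustration, not something shipped with this repo):

```bash
#!/bin/sh
# Save as .git/hooks/pre-commit and make it executable:
#   chmod +x .git/hooks/pre-commit
# Runs formatting and tests before each commit; a non-zero exit aborts the commit.
yarn precommit
```
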
11 | ## Endpoints
12 |
13 | - https://api.nycmesh.net/v1/nodes
14 | - https://api.nycmesh.net/v1/links
15 | - https://api.nycmesh.net/v1/buildings
16 | - https://api.nycmesh.net/v1/members
17 | - https://api.nycmesh.net/v1/requests
18 | - https://api.nycmesh.net/v1/search
19 | - https://api.nycmesh.net/v1/los
20 | - https://api.nycmesh.net/v1/kml
21 |
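These are plain HTTPS routes, so a quick smoke test only needs curl (assuming the list endpoints are publicly readable; responses are JSON):

```bash
# Fetch the public node and link lists.
curl https://api.nycmesh.net/v1/nodes
curl https://api.nycmesh.net/v1/links
```
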
22 | ## Architecture
23 |
24 | - Netlify Functions for hosting
25 | - Express for handling requests
26 | - PostgreSQL for main db
27 | - PostGIS for line of sight db
28 | - DigitalOcean Spaces (S3) for storing panorama images
29 | - Auth0 for access control
30 |
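The pieces fit together roughly as follows: an Express app defines the routes, and the whole app is exported as a single Netlify Function. A minimal sketch of that wiring, assuming the widely used `serverless-http` package (the repo's actual wiring — see `src/v1.js` and `webpack.functions.js` — may differ):

```js
// Sketch only: expose an Express app as a Netlify Function handler.
const express = require("express");
const serverless = require("serverless-http");

const app = express();

app.get("/v1/nodes", async (req, res) => {
  // The real API would query PostgreSQL here (see src/db/nodes.js).
  res.json([]);
});

// Netlify invokes this exported handler for every incoming request.
module.exports.handler = serverless(app);
```
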
31 | ## Running locally
32 |
33 | - Clone the repo: `git clone git@github.com:olivernyc/nycmesh-api.git`
34 | - Install dependencies: `yarn install`
35 | - Run the local server: `yarn start`
36 |
37 | You'll need a `.env` file with the following values:
38 |
39 | ```
40 | DATABASE_URL=postgres://$user:$pass@$host:$port/$db
41 | LOS_DATABASE_URL=postgres://$user:$pass@$host:$port/$db
42 |
43 | S3_BUCKET=
44 | S3_ENDPOINT=
45 | S3_ID=
46 | S3_KEY=
47 |
48 | JWKS_URI=
49 | JWT_AUDIENCE=
50 | JWT_ISSUER=
51 |
52 | SLACK_TOKEN=
53 | SLACK_INSTALL_CHANNEL=
54 | SLACK_PANO_CHANNEL=
55 | SLACK_REQUEST_CHANNEL=
56 |
57 | OSTICKET_API_KEY=
58 |
59 | ACUITY_USER_ID=
60 | ACUITY_API_KEY=
61 | ```
62 |
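For a local database, the repo's `docker-compose.yml` starts Postgres 12 on port 5432 with trust auth. A possible bootstrap, assuming the migrations are applied with `node-pg-migrate` (the format the files in `migrations/` follow):

```bash
# Start a throwaway local Postgres (see docker-compose.yml).
docker-compose up -d database

# Point DATABASE_URL at it and apply the migrations in migrations/.
export DATABASE_URL=postgres://postgres@localhost:5432/postgres
npx node-pg-migrate up
```
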
63 | ## Schema
64 |
65 | Currently, we use node numbers to represent join requests, members, and nodes. This schema is an attempt to detangle our data and create a common definition of the various components of the mesh.
66 |
67 | ### Building
68 |
69 | A physical location.
70 |
71 | | id | address | lat | lng | alt | bin | notes |
72 | | --- | ------- | --- | --- | --- | --- | ----- |
73 |
74 | ### Member
75 |
76 | A person in the mesh community. For example, a node-owner, donor or installer.
77 |
78 | | id | name | email | phone |
79 | | --- | ---- | ----- | ----- |
80 |
81 | ### Node
82 |
83 | A specific location on the network. Typically one per building.
84 |
85 | | id | lat | lng | alt | status | name | location |
86 | | --- | --- | --- | --- | ------ | ---- | -------- |
87 |
88 | - id
89 | - lat
90 | - lng
91 | - alt
92 | - status (active, dead)
93 | - name (optional) // e.g. "Saratoga", "SN1"
94 | - location (optional) // Human readable location, e.g. "Roof", "Basement"
95 | - notes (optional)
96 | - create_date
97 | - abandon_date (optional)
98 | - building_id
99 | - member_id
100 |
101 | ### Join Request
102 |
103 | - id
104 | - date
105 | - roof_access
106 | - member_id
107 | - building_id
108 |
109 | ### Panorama
110 |
111 | - id
112 | - url
113 | - date
114 | - request_id
115 |
116 | ### Device Type
117 |
118 | - id
119 | - name
120 | - manufacturer
121 | - range
122 | - width
123 |
124 | ### Device
125 |
126 | A unit of hardware. Routers, radios, servers, etc.
127 |
128 | - id
129 | - status (in stock, active, dead)
130 | - name (optional)
131 | - ssid (optional)
132 | - notes (optional)
133 | - lat
134 | - lng
135 | - alt
136 | - azimuth (direction in degrees, default 0)
137 | - create_date
138 | - abandon_date (optional)
139 | - device_type_id
140 | - node_id
141 |
142 | ### Link
143 |
144 | A connection between two devices. For example, an ethernet cable or wireless connection.
145 |
146 | - id
147 | - status (active, dead)
148 | - create_date
149 | - device_a_id
150 | - device_b_id
151 |
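Because links reference devices rather than nodes, queries usually hop link → device → node. A sketch of that join, using the tables described above:

```sql
-- Resolve each active link to the two nodes it connects.
SELECT
  links.id AS link_id,
  device_a.node_id AS node_a_id,
  device_b.node_id AS node_b_id
FROM links
JOIN devices AS device_a ON device_a.id = links.device_a_id
JOIN devices AS device_b ON device_b.id = links.device_b_id
WHERE links.status = 'active';
```
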
152 | ## Example Queries
153 |
154 | ### Most join requests by member
155 |
156 | ```sql
157 | SELECT
158 | COUNT(requests.id) AS count,
159 | members.name AS member_name
160 | FROM
161 | requests
162 | RIGHT JOIN members ON requests.member_id = members.id
163 | GROUP BY
164 | members.id
165 | ORDER BY
166 | count DESC;
167 | ```
168 |
169 | ### Join requests in active node buildings
170 |
171 | ```sql
172 | SELECT
173 | SUBSTRING(buildings.address, 1, 64) AS building_address,
174 | COUNT(DISTINCT requests.member_id) AS request_count,
175 | COUNT(DISTINCT nodes.member_id) AS node_count,
176 | JSON_AGG(DISTINCT nodes.id) AS node_ids,
177 | JSON_AGG(DISTINCT members.email) AS request_emails
178 | FROM
179 | buildings
180 | JOIN requests ON buildings.id = requests.building_id
181 | JOIN members ON members.id = requests.member_id
182 | JOIN nodes ON buildings.id = nodes.building_id
183 | WHERE
184 | nodes.status = 'active'
185 | GROUP BY
186 | buildings.id
187 | HAVING
188 | COUNT(DISTINCT requests.member_id) > COUNT(DISTINCT nodes.member_id)
189 | ORDER BY
190 | request_count DESC;
191 | ```
192 |
193 | ### Tallest buildings with panos
194 |
195 | ```sql
196 | SELECT
197 | buildings.alt,
198 | COUNT(DISTINCT requests.id) as request_count,
199 | SUBSTRING(buildings.address, 1, 64) as building_address,
200 | ARRAY_AGG(DISTINCT nodes.id) as node_ids,
201 | ARRAY_AGG(DISTINCT panoramas.url) as pano_urls
202 | FROM buildings
203 | JOIN requests
204 | ON buildings.id = requests.building_id
205 | FULL JOIN nodes
206 | ON buildings.id = nodes.building_id
207 | JOIN panoramas
208 | ON requests.id = panoramas.request_id
209 | WHERE requests.roof_access IS TRUE
210 | GROUP BY buildings.id
211 | ORDER BY buildings.alt DESC;
212 | ```
213 |
214 | ### Most join requests by building
215 |
216 | ```sql
217 | SELECT
218 | SUBSTRING(buildings.address, 1, 64) as building_address,
219 | COUNT(requests.id) as count
220 | FROM requests
221 | RIGHT JOIN buildings
222 | ON requests.building_id = buildings.id
223 | GROUP BY buildings.id
224 | ORDER BY count DESC;
225 | ```
226 |
227 | ### And node count
228 |
229 | ```sql
230 | SELECT
231 | buildings.alt as building_height,
232 | -- COUNT(requests.id) as request_count,
233 | COUNT(DISTINCT nodes.id) as node_count,
234 | SUBSTRING (buildings.address, 1, 64) as building_address
235 | FROM nodes
236 | RIGHT JOIN buildings
237 | ON nodes.building_id = buildings.id
238 | RIGHT JOIN requests
239 | ON nodes.building_id = requests.building_id
240 | GROUP BY buildings.id
241 | ORDER BY node_count DESC;
242 | ```
243 |
244 | ### Node ids in building
245 |
246 | ```sql
247 | SELECT array_agg(id) FROM nodes WHERE nodes.building_id = $1;
248 | ```
249 |
250 | ### Most nodes by building
251 |
252 | ```sql
253 | SELECT
254 | buildings.alt as building_height,
255 | COUNT(nodes.id) as node_count,
256 | SUBSTRING (buildings.address, 1, 64) as building_address
257 | FROM nodes
258 | RIGHT JOIN buildings
259 | ON nodes.building_id = buildings.id
260 | GROUP BY buildings.id
261 | ORDER BY node_count DESC;
262 | ```
263 |
264 | ### Nodes and join requests by building
265 |
266 | ```sql
267 | SELECT
268 | buildings.id,
269 | COUNT(DISTINCT requests.id) as request_count,
270 | COUNT(DISTINCT nodes.id) as node_count,
271 | ARRAY_AGG(DISTINCT nodes.id) as node_ids,
272 | SUBSTRING(buildings.address, 1, 64) as building_address
273 | FROM buildings
274 | JOIN requests
275 | ON buildings.id = requests.building_id
276 | JOIN nodes
277 | ON buildings.id = nodes.building_id
278 | GROUP BY buildings.id
279 | ORDER BY request_count DESC;
280 | ```
281 |
282 | ### Tallest buildings
283 |
284 | ```sql
285 | SELECT
286 | buildings.alt as building_height,
287 | COUNT(nodes.id) as node_count,
288 | SUBSTRING(buildings.address, 1, 64) as building_address
289 | FROM nodes
290 | RIGHT JOIN buildings
291 | ON nodes.building_id = buildings.id
292 | GROUP BY buildings.id
293 | ORDER BY building_height DESC;
294 | ```
295 |
296 | ### Tallest buildings with nodes
297 |
298 | ```sql
299 | SELECT
300 | buildings.id as building_id,
301 | buildings.alt as building_height,
302 | COUNT(nodes.id) as node_count,
303 | array_agg(nodes.id) as node_ids,
304 | SUBSTRING(buildings.address, 1, 64) as building_address
305 | FROM buildings
306 | LEFT JOIN nodes
307 | ON buildings.id = nodes.building_id
308 | GROUP BY buildings.id
309 | -- HAVING COUNT(nodes.id) > 0 -- Toggle this line to hide/show nodeless buildings
310 | ORDER BY building_height DESC;
311 | ```
312 |
313 | ### Tallest buildings with join requests
314 |
315 | ```sql
316 | SELECT
317 | buildings.id as building_id,
318 | buildings.alt as building_height,
319 | COUNT(requests.id) as request_count,
320 | array_agg(requests.id) as request_ids,
321 | SUBSTRING(buildings.address, 1, 64) as building_address
322 | FROM buildings
323 | LEFT JOIN requests
324 | ON buildings.id = requests.building_id
325 | GROUP BY buildings.id
326 | -- HAVING COUNT(requests.id) > 0 -- Toggle this line to hide/show requestless buildings
327 | ORDER BY building_height DESC;
328 | ```
329 |
330 | ## Line of Sight
331 |
332 | ### DB Setup
333 |
334 | Install lxml:
335 |
336 | ```bash
337 | pip3 install lxml
338 | ```
339 |
340 | Set up the db:
341 |
342 | ```bash
343 | node scripts/reset-los-db.js
344 | ```
345 |
346 | Download the [building data](https://www1.nyc.gov/site/doitt/initiatives/3d-building.page):
347 |
348 | ```bash
349 | curl -o building_data.zip http://maps.nyc.gov/download/3dmodel/DA_WISE_GML.zip
350 | unzip building_data.zip -d building_data
351 | rm building_data.zip
352 | ```
353 |
354 | Insert the building data into the LOS database:
355 |
356 | ```bash
357 | for i in $(seq 1 21); do
358 |   python3 ./scripts/gml_to_pgsql.py ./building_data/DA_WISE_GMLs/DA${i}_3D_Buildings_Merged.gml buildings
359 | done | psql $LOS_DATABASE_URL
381 |
382 | Now we are ready to make queries!
383 |
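A quick sanity check before querying, assuming the import produced the same `ny` table used in the queries below:

```bash
# Count the imported building footprints.
psql $LOS_DATABASE_URL -c "SELECT COUNT(*) FROM ny;"
```
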
384 | ### Making Queries
385 |
386 | Let's check for line of sight between [Supernode 1 and Node 3](https://www.nycmesh.net/map/nodes/227-3).
387 |
388 | #### Step 1: Look up BINs:
389 |
390 | Use [NYC GeoSearch](https://geosearch.planninglabs.nyc/docs/) or [NYC Building Information Search](http://a810-bisweb.nyc.gov/bisweb/bispi00.jsp).
391 |
392 | - Supernode 1 BIN: `1001389`
393 | - Node 3 BIN: `1006184`
394 |
395 | #### Step 2: Get building midpoints:
396 |
397 | ```sql
398 | SELECT ST_AsText(ST_Centroid((SELECT geom FROM ny WHERE bldg_bin = '1001389'))) as a,
399 | ST_AsText(ST_Centroid((SELECT geom FROM ny WHERE bldg_bin = '1006184'))) as b;
400 | # a | b
401 | # ------------------------------------------+------------------------------------------
402 | # POINT(987642.232749068 203357.276907034) | POINT(983915.956115596 198271.837494287)
403 | # (1 row)
404 | ```
405 |
406 | #### Step 3: Get building heights:
407 |
408 | ```sql
409 | SELECT ST_ZMax((SELECT geom FROM ny WHERE bldg_bin = '1001389')) as a,
410 | ST_ZMax((SELECT geom FROM ny WHERE bldg_bin = '1006184')) as b;
411 | # a | b
412 | # ------------------+------------------
413 | # 582.247499999998 | 120.199699999997
414 | # (1 row)
415 | ```
416 |
417 | #### Step 4: Check for intersections:
418 |
419 | ```sql
420 | SELECT a.bldg_bin
421 | FROM ny AS a
422 | WHERE ST_3DIntersects(a.geom, ST_SetSRID('LINESTRINGZ (983915 198271 120, 987642 203357 582)'::geometry, 2263));
423 | # bldg_bin
424 | # ----------
425 | # (0 rows)
426 | ```
427 |
428 | There are no intersections. We have line of sight!
429 |
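The three lookups can also be collapsed into a single statement. A sketch (not from the repo) that builds the sight line from the two rooftop centroids and lists any obstructing buildings, assuming the same `ny` table and SRID 2263:

```sql
-- Hypothetical combined line-of-sight check between BINs 1001389 and 1006184.
WITH endpoints AS (
  SELECT bldg_bin,
         ST_Force3D(ST_Centroid(geom)) AS base, -- footprint centroid at z = 0
         ST_ZMax(geom)                 AS height
  FROM ny
  WHERE bldg_bin IN ('1001389', '1006184')
),
sightline AS (
  -- Lift each centroid to its roof height and connect the two points.
  SELECT ST_MakeLine(ST_Translate(base, 0, 0, height)) AS line
  FROM endpoints
)
SELECT ny.bldg_bin
FROM ny, sightline
WHERE ST_3DIntersects(ny.geom, sightline.line)
  AND ny.bldg_bin NOT IN ('1001389', '1006184'); -- ignore the endpoint buildings
```
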
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.9"
2 | services:
3 | database:
4 | image: postgres:12.4
5 | ports:
6 | - "5432:5432"
7 | environment:
8 | POSTGRES_HOST_AUTH_METHOD: trust
9 |
--------------------------------------------------------------------------------
/image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/meshcenter/mesh-api/a85dfd7d1555a8a6398d62a54c86ce185109c37f/image.png
--------------------------------------------------------------------------------
/migrations/1608616875404_base.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable camelcase */
2 |
3 | exports.shorthands = undefined;
4 |
5 | const notNull = true;
6 |
7 | exports.up = (pgm) => {
8 | pgm.createTable("buildings", {
9 | id: "id",
10 | address: { type: "varchar(256)", notNull },
11 | lat: { type: "float", notNull },
12 | lng: { type: "float", notNull },
13 | alt: { type: "float", notNull },
14 | bin: { type: "int" },
15 | notes: { type: "text" },
16 | });
17 |
18 | pgm.createTable("members", {
19 | id: "id",
20 | name: { type: "varchar(256)" },
21 | email: { type: "varchar(256)", notNull, unique: true },
22 | phone: { type: "varchar(256)" },
23 | });
24 |
25 | pgm.createType("node_status", ["active", "inactive", "potential"]);
26 |
27 | pgm.createTable("nodes", {
28 | id: "id",
29 | lat: { type: "float", notNull },
30 | lng: { type: "float", notNull },
31 | alt: { type: "float", notNull },
32 | status: { type: "node_status", notNull },
33 | location: { type: "varchar(256)" },
34 | name: { type: "varchar(256)" },
35 | notes: { type: "text" },
36 | create_date: { type: "timestamp with time zone", notNull },
37 | abandon_date: { type: "timestamp with time zone" },
38 | building_id: { type: "integer", references: "buildings(id)", notNull },
39 | member_id: { type: "integer", references: "members(id)", notNull },
40 | });
41 |
42 | pgm.createType("request_status", ["open", "closed"]);
43 |
44 | pgm.createTable("requests", {
45 | id: "id",
46 | status: { type: "request_status", notNull, default: "open" },
47 | apartment: { type: "varchar(10)" },
48 | roof_access: { type: "bool", notNull },
49 | date: { type: "timestamp with time zone", notNull },
50 | osticket_id: { type: "integer" },
51 | building_id: { type: "integer", references: "buildings(id)", notNull },
52 | member_id: { type: "integer", references: "members(id)", notNull },
53 | });
54 |
55 | pgm.createTable("panoramas", {
56 | id: "id",
57 | url: { type: "varchar(256)", notNull },
58 | date: { type: "timestamp with time zone", notNull },
59 | request_id: { type: "integer", references: "requests(id)", notNull },
60 | });
61 |
62 | pgm.createTable("device_types", {
63 | id: "id",
64 | name: { type: "varchar(256)", notNull },
65 | manufacturer: { type: "varchar(256)" },
66 | range: { type: "float", notNull },
67 | width: { type: "float", notNull },
68 | });
69 |
70 | pgm.createType("device_status", ["active", "inactive", "potential"]);
71 |
72 | pgm.createTable("devices", {
73 | id: "id",
74 | lat: { type: "float", notNull },
75 | lng: { type: "float", notNull },
76 | alt: { type: "float", notNull },
77 | azimuth: { type: "int", default: 0 },
78 | status: { type: "device_status", notNull },
79 | name: { type: "varchar(256)" },
80 | ssid: { type: "varchar(256)" },
81 | notes: { type: "text" },
82 | create_date: { type: "timestamp with time zone" },
83 | abandon_date: { type: "timestamp with time zone" },
84 | device_type_id: {
85 | type: "integer",
86 | references: "device_types(id)",
87 | notNull,
88 | },
89 | node_id: { type: "integer", references: "nodes(id)", notNull },
90 | });
91 |
92 | pgm.createType("link_status", ["active", "inactive", "potential"]);
93 |
94 | pgm.createTable("links", {
95 | id: "id",
96 | status: { type: "link_status", notNull },
97 | create_date: { type: "timestamp with time zone", notNull },
98 | device_a_id: { type: "integer", references: "devices(id)", notNull },
99 | device_b_id: { type: "integer", references: "devices(id)", notNull },
100 | });
101 |
102 | pgm.createTable("los", {
103 | id: "id",
104 | building_a_id: { type: "integer", references: "buildings(id)", notNull },
105 | building_b_id: { type: "integer", references: "buildings(id)", notNull },
106 | lat_a: { type: "float", notNull },
107 | lng_a: { type: "float", notNull },
108 | alt_a: { type: "float", notNull },
109 | lat_b: { type: "float", notNull },
110 | lng_b: { type: "float", notNull },
111 | alt_b: { type: "float", notNull },
112 | });
113 |
114 | pgm.createType("appointment_type", ["install", "support", "survey"]);
115 |
116 | pgm.createTable("appointments", {
117 | id: "id",
118 | type: { type: "appointment_type", notNull },
119 | date: { type: "timestamp with time zone", notNull },
120 | notes: { type: "text" },
121 | request_id: { type: "integer", references: "requests(id)", notNull },
122 | member_id: { type: "integer", references: "members(id)", notNull },
123 | building_id: { type: "integer", references: "buildings(id)", notNull },
124 | node_id: { type: "integer", references: "nodes(id)" },
125 | acuity_id: { type: "integer" },
126 | slack_ts: { type: "varchar(256)" },
127 | });
128 | };
129 |
130 | exports.down = (pgm) => {
131 | // Tables
132 | const opts = { ifExists: true, cascade: true };
133 | pgm.dropTable("appointments", opts);
134 | pgm.dropTable("los", opts);
135 | pgm.dropTable("links", opts);
136 | pgm.dropTable("devices", opts);
137 | pgm.dropTable("device_types", opts);
138 | pgm.dropTable("panoramas", opts);
139 | pgm.dropTable("requests", opts);
140 | pgm.dropTable("members", opts);
141 | pgm.dropTable("buildings", opts);
142 | pgm.dropTable("nodes", opts);
143 |
144 | // Types
145 | pgm.dropType("node_status");
146 | pgm.dropType("device_status");
147 | pgm.dropType("link_status");
148 | pgm.dropType("request_status");
149 | pgm.dropType("appointment_type");
150 | };
151 |
--------------------------------------------------------------------------------
/migrations/1609390783696_apartment-length.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable camelcase */
2 |
3 | exports.shorthands = undefined;
4 |
5 | exports.up = (pgm) => {
6 | pgm.alterColumn("requests", "apartment", { type: "varchar(256)" });
7 | };
8 |
9 | exports.down = (pgm) => {
10 | pgm.sql(`UPDATE requests SET apartment = substring(apartment from 1 for 10)`);
11 | pgm.alterColumn("requests", "apartment", { type: "varchar(10)" });
12 | };
13 |
--------------------------------------------------------------------------------
/migrations/1609650077081_request-slack-ts.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable camelcase */
2 |
3 | exports.shorthands = undefined;
4 |
5 | exports.up = (pgm) => {
6 | pgm.addColumns("requests", {
7 | slack_ts: { type: "varchar(256)" },
8 | });
9 | };
10 |
11 | exports.down = (pgm) => {
12 | pgm.dropColumns("requests", ["slack_ts"]);
13 | };
14 |
--------------------------------------------------------------------------------
/migrations/1609701811032_node-memberships.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable camelcase */
2 |
3 | exports.shorthands = undefined;
4 |
5 | exports.up = (pgm) => {
6 | pgm.createTable("memberships", {
7 | id: "id",
8 | node_id: { type: "integer", references: "nodes(id)", notNull: true },
9 | member_id: { type: "integer", references: "members(id)", notNull: true },
10 | });
11 |
12 | pgm.addConstraint("memberships", "memberships_node_id_member_id_unique", {
13 | unique: ["node_id", "member_id"],
14 | });
15 |
16 | pgm.sql(`
17 | INSERT INTO memberships (node_id, member_id) SELECT id, member_id FROM nodes WHERE member_id IS NOT NULL
18 | `);
19 |
20 | pgm.dropColumns("nodes", ["member_id"]);
21 | };
22 |
23 | exports.down = (pgm) => {
24 | pgm.addColumns("nodes", {
25 | member_id: { type: "integer", references: "members(id)" },
26 | });
27 |
28 | pgm.sql(`
29 | UPDATE nodes SET member_id = memberships.member_id FROM memberships WHERE memberships.node_id = nodes.id
30 | `);
31 |
32 | pgm.alterColumn("nodes", "member_id", { notNull: true });
33 |
34 | pgm.dropTable("memberships");
35 | };
36 |
--------------------------------------------------------------------------------
/migrations/1612754047978_request-installed.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable camelcase */
2 |
3 | exports.shorthands = undefined;
4 |
5 | exports.up = (pgm) => {
6 | pgm.addTypeValue("request_status", "installed", { ifNotExists: true });
7 | };
8 |
9 | exports.down = (pgm) => {
10 |   // Postgres can't remove a value from an enum type, so this migration can't be reverted
11 | };
12 |
--------------------------------------------------------------------------------
/migrations/1612760719465_stripe.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable camelcase */
2 |
3 | exports.shorthands = undefined;
4 |
5 | exports.up = (pgm) => {
6 | pgm.addColumns("members", {
7 | stripe_customer_id: { type: "varchar(255)" },
8 | donor: { type: "bool" },
9 | });
10 | };
11 |
12 | exports.down = (pgm) => {
13 | pgm.dropColumns("members", ["stripe_customer_id", "donor"]);
14 | };
15 |
--------------------------------------------------------------------------------
/migrations/1613282969996_request-tokens.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable camelcase */
2 |
3 | exports.shorthands = undefined;
4 |
5 | exports.up = (pgm) => {
6 | pgm.createTable("request_tokens", {
7 | token: { type: "varchar(255)", notNull: true },
8 | request_id: { type: "integer", references: "requests(id)", notNull: true },
9 | });
10 | };
11 |
12 | exports.down = (pgm) => {
13 | pgm.dropTable("request_tokens");
14 | };
15 |
--------------------------------------------------------------------------------
/netlify.toml:
--------------------------------------------------------------------------------
1 | [build]
2 | command = "yarn build && yarn migrate up && yarn test"
3 | functions = "functions"
4 | publish = "public"
5 |
6 | [[headers]]
7 | for = "/*"
8 | [headers.values]
9 | Access-Control-Allow-Origin = "*"
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "nycmesh-api",
3 | "version": "1.0.0",
4 | "main": "index.js",
5 | "dependencies": {
6 | "@slack/web-api": "^5.8.0",
7 | "acuityscheduling": "^0.1.9",
8 | "aws-sdk": "^2.575.0",
9 | "cors": "^2.8.5",
10 | "date-fns": "^2.8.1",
11 | "encoding": "^0.1.13",
12 | "express": "5.0.0-alpha.8",
13 | "jsonwebtoken": "^8.5.1",
14 | "jwks-rsa": "^1.6.0",
15 | "node-fetch": "^2.6.0",
16 | "pg": "^8.0.0",
17 | "serverless-http": "^2.3.0",
18 | "stripe": "^8.134.0"
19 | },
20 | "devDependencies": {
21 | "colors": "^1.4.0",
22 | "dotenv": "^8.2.0",
23 | "dotenv-webpack": "^1.7.0",
24 | "jest": "^26.4.0",
25 | "netlify-lambda": "^1.5.1",
26 | "node-pg-migrate": "^5.9.0",
27 | "prettier": "2.2.1"
28 | },
29 | "scripts": {
30 | "start": "NODE_ENV=development netlify-lambda serve src -c webpack.functions.js",
31 | "build": "netlify-lambda build src -c webpack.functions.js",
32 | "format": "prettier --write .",
33 | "precommit": "yarn format && yarn test",
34 | "los": "node scripts/los.js",
35 | "migrate": "node-pg-migrate --reject-unauthorized false",
36 | "test": "TZ=UTC jest --setupFiles dotenv/config"
37 | },
38 | "babel": {
39 | "presets": [
40 | [
41 | "@babel/preset-env",
42 | {
43 | "targets": {
44 | "node": "current"
45 | }
46 | }
47 | ]
48 | ]
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/public/_redirects:
--------------------------------------------------------------------------------
1 | https://nycmesh-api.netlify.com/* https://api.nycmesh.net/:splat 301!
2 | / https://github.com/olivernyc/nycmesh-api 302
3 | /nycmesh.kml /v1/kml
4 | /* /.netlify/functions/:splat 200
--------------------------------------------------------------------------------
/scripts/ProgressBar.js:
--------------------------------------------------------------------------------
1 | const colors = require("colors");
2 | const readline = require("readline");
3 |
4 | const CLEAR_WHOLE_LINE = 0;
5 | const CLEAR_RIGHT_OF_CURSOR = 1;
6 |
7 | function ProgressBar(total) {
8 | this.stdout = process.stdout;
9 | this.total = total;
10 | this.chars = ["█", "░"];
11 | this.delay = 60;
12 | this.curr = 0;
13 | }
14 |
15 | ProgressBar.prototype.tick = function () {
16 | if (this.curr >= this.total) {
17 | return;
18 | }
19 |
20 | this.curr++;
21 |
22 | // schedule render
23 | if (!this.id) {
24 | this.id = setTimeout(() => this.render(), this.delay);
25 | }
26 | };
27 |
28 | ProgressBar.prototype.stop = function () {
29 | // "stop" by setting current to end so `tick` becomes noop
30 | this.curr = this.total;
31 |
32 | clearLine(this.stdout);
33 | if (this._callback) {
34 | this._callback(this);
35 | }
36 | };
37 |
38 | ProgressBar.prototype.render = function () {
39 | let ratio = this.curr / this.total;
40 | ratio = Math.min(Math.max(ratio, 0), 1);
41 |
42 | // progress without bar
43 | let bar = ` ${this.curr}/${this.total}`;
44 |
45 | // calculate size of actual bar
46 | // $FlowFixMe: investigate process.stderr.columns flow error
47 | const availableSpace = Math.max(0, this.stdout.columns - bar.length - 1);
48 | const width = Math.min(this.total, availableSpace);
49 | const completeLength = Math.round(width * ratio);
50 | const complete = colors.green("█").repeat(completeLength);
51 | const incomplete = "░".repeat(width - completeLength);
52 | bar = `${complete}${incomplete}${bar}`;
53 |
54 | toStartOfLine(this.stdout);
55 | this.stdout.write(bar);
56 | };
57 |
58 | function clearLine(stdout) {
59 | readline.clearLine(stdout, CLEAR_WHOLE_LINE);
60 | readline.cursorTo(stdout, 0);
61 | }
62 |
63 | function toStartOfLine(stdout) {
64 | readline.cursorTo(stdout, 0);
65 | }
66 |
67 | module.exports = ProgressBar;
68 |
--------------------------------------------------------------------------------
/scripts/db.js:
--------------------------------------------------------------------------------
1 | const { Pool } = require("pg");
2 | const url = require("url");
3 |
4 | let pgPool;
5 |
6 | async function createPool(connectionString) {
7 | const params = url.parse(connectionString);
8 | pgPool = new Pool({
9 | connectionString,
10 | ssl: sslOptions(params.hostname),
11 | });
12 |
13 | // See src/db/index.js
14 | function sslOptions(host) {
15 | if (host === "localhost" || host === "127.0.0.1") return false;
16 | return {
17 | rejectUnauthorized: false,
18 | mode: "require",
19 | };
20 | }
21 | }
22 |
23 | async function performQuery(text, values) {
24 | if (!pgPool) await createPool(process.env.DATABASE_URL);
25 | const client = await pgPool.connect();
26 | const result = await client.query(text, values);
27 | client.release();
28 | return result.rows;
29 | }
30 |
31 | async function insertBulk(tableName, valueNames, items, valueExtractor) {
32 | const commandText = `INSERT INTO ${tableName}`;
33 | const valueNameText = `(${valueNames.join(", ")})`; // (id, name, email)
34 | let queryText = `${commandText} ${valueNameText} VALUES `;
35 |
36 | const queryValues = [];
37 | for (let i = 0; i < items.length; i++) {
38 | const values = valueExtractor(items[i]);
39 |
40 | // TODO: This is pretty hacky.. should use named parameters instead
41 | // "($1, $2, $3, $4, $5, $6)"
42 | const oneToN = Array.from(Array(values.length), (e, i) => i + 1);
43 | const offset = queryValues.length;
44 | const indexVars = oneToN.map((i) => `$${offset + i}`).join(", ");
45 | const indexVarsText = `(${indexVars})`;
46 |
47 | queryText += indexVarsText;
48 | if (i < items.length - 1) queryText += ", ";
49 |
50 | queryValues.push(...values);
51 | }
52 |
53 | return performQuery(queryText, queryValues);
54 | }
55 |
56 | module.exports = { performQuery, insertBulk };
57 |
--------------------------------------------------------------------------------
/scripts/fix-bin.js:
--------------------------------------------------------------------------------
1 | require("dotenv").config();
2 | const fetch = require("node-fetch");
3 | const { performQuery } = require("./db");
4 |
5 | lookupBins().then(() => process.exit(0));
6 |
7 | async function lookupBins() {
8 | const buildings = await getBuildings();
9 | for (var i = buildings.length - 1; i >= 0; i--) {
10 | const building = buildings[i];
11 | if (building.bin) continue;
12 | const bin = await getBIN(building);
13 | if (bin && bin !== building.bin) {
14 | console.log(building.address);
15 | console.log(`${building.bin} => ${bin}`);
16 | await setBin(building.id, bin);
17 | }
18 | }
19 | }
20 |
21 | async function getBIN({ address, lat, lng }) {
22 | if (!address) return -1;
23 | address = address.toLowerCase();
24 | if (address.includes(", md")) return -1;
25 | if (address.includes(", canada")) return -1;
26 | if (address.includes(", ca")) return -1;
29 | if (address.includes("nj, usa")) return -1;
30 | if (address.includes(", nj")) return -1;
31 | if (address.includes(",nj")) return -1;
32 | if (address.includes(" nj 0")) return -1;
33 | if (address.indexOf("nj") === address.length - 2) return -1;
34 |
35 | if (!parseInt(address[0]) || !address.includes(" ")) {
36 | console.log(`Bad address: ${address}`);
37 | return -1;
38 | }
39 |
40 | const URIaddress = encodeURIComponent(address);
41 | const url = `https://geosearch.planninglabs.nyc/v1/search?text=${URIaddress}&focus.point.lat=${parseFloat(
42 | lat
43 | )}&focus.point.lon=${parseFloat(lng)}`;
44 | const binRes = await fetch(url);
45 | const resJSON = await binRes.json();
46 | const { features } = resJSON;
47 | if (!features.length) return -1;
48 |
49 | // Choose closest match
50 | features.sort(
51 | (a, b) =>
52 | distance(a.geometry.coordinates, [lng, lat]) -
53 | distance(b.geometry.coordinates, [lng, lat])
54 | );
55 |
56 | const { pad_bin: bin, label } = features[0].properties;
57 | return parseInt(bin);
58 |
59 | function distance(a, b) {
60 | const xDiff = a[0] - b[0];
61 | const yDiff = a[1] - b[1];
62 | return Math.sqrt(xDiff * xDiff + yDiff * yDiff);
63 | }
64 | }
65 |
66 | async function getBuildings() {
67 | return performQuery(
68 | `SELECT
69 | buildings.*,
70 | JSON_AGG(DISTINCT nodes.*) AS nodes
71 | FROM
72 | buildings
73 | LEFT JOIN nodes ON nodes.building_id = buildings.id
74 | LEFT JOIN requests ON requests.building_id = buildings.id
75 | GROUP BY
76 | buildings.id
77 | ORDER BY
78 | buildings.bin`
79 | );
80 | }
81 |
82 | async function setBin(building_id, bin) {
83 | return performQuery(`UPDATE buildings SET bin = $1 WHERE id = $2`, [
84 | bin,
85 | building_id,
86 | ]);
87 | }
88 |
--------------------------------------------------------------------------------
/scripts/gml_to_pgsql.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 |
3 | # Forked from: https://github.com/Oslandia/citygml2pgsql
4 |
5 | """Convert CityGML buildings to PostgreSQL INSERT statements for insertion into a table.
6 | Before running the script, ensure that your database contains the right table.
7 | To create the table, run:
8 | CREATE TABLE ny(gid SERIAL PRIMARY KEY, bldg_id varchar(255), bldg_bin varchar(255), geom GEOMETRY('MULTIPOLYGONZ', 2263))
9 |
10 | USAGE
11 | gml_to_pgsql file1.gml table_name
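    For example, to load the generated INSERT statements into the LOS database
    (the file name and connection string are placeholders; the buildings table is
    created by scripts/reset-los-db.js):
        python gml_to_pgsql.py buildings.gml buildings | psql $LOS_DATABASE_URL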
12 | """
13 |
14 | import os
15 | import sys
16 | import re
17 | from lxml import etree
18 |
19 | def linear_ring_to_wkt(ring):
20 | '''
21 | Convert a linear ring object with vertices in 3 dimensions to well-known text.
22 |     The linear ring is defined in GML by a list of space-delimited coordinate values,
23 |     three per vertex of the ring.
24 | '''
25 |
26 | dim = 3 # x, y, z
27 | raw_coord = ring[0].text.split()
28 |
29 | # Split coordinates
30 | coord = [raw_coord[i:i+dim] for i in range(0, len(raw_coord), dim)]
31 |
32 | # If ring isn't closed, close it
33 | if coord[0] != coord[-1]:
34 | coord.append(coord[0])
35 |
36 | # Catch degenerate rings
37 | if len(coord) < 4:
38 | sys.stderr.write( 'degenerated LinearRing gml:id="'+\
39 | ring.get("{http://www.opengis.net/gml}id")+'"\n')
40 | return None
41 |
42 | # Return a list of x, y, and z coordinates that together compose the LinearRing
43 | # in string format
44 | return "(" + ",".join([" ".join(c) for c in coord]) + ")"
45 |
46 | def polygon_to_wkt(poly):
47 | '''
48 | Convert a polygon composed of multiple linear rings to well-known text
49 | '''
50 | all_rings = [linear_ring_to_wkt(ring) for ring in poly.iter(insert_namespace('LinearRing', poly)) ]
51 |
52 | # remove degenerate rings (i.e. those that have fewer than 4 vertices)
53 |     sanitized = [r for r in all_rings if r is not None]  # a real list, so the emptiness check works (filter() is lazy in Python 3)
54 |
55 | if not sanitized:
56 | sys.stderr.write('degenerate polygon gml:id="{}'.format(poly.get("{http://www.opengis.net/gml}id")))
57 | return None
58 |
59 | return "({})".format(",".join(sanitized))
60 |
61 | def insert_namespace(target, root):
62 | '''
63 | Given a target string, iterate through the XML tree until we find a matching tag
64 | and prepend the tag's namespace. This gives us a full tag name we can pass to
65 | etree.iter()
66 | '''
67 | for e in root.iter():
68 | if e.tag is etree.Comment:
69 | continue
70 | m = re.match(r"(.*)"+target, e.tag) if e.tag else None
71 | if m:
72 | return m.groups()[0]+target
73 | return None
74 |
75 | # def run_psql(filename, table_name, srid, lod, geometry_column="geom", building_id_column="building_id"):
76 | def run_psql(filename, table_name):
77 | '''
78 | Iterate over a list of Building objects in a CityGML file, isolate just the RoofSurface polygons,
79 | and insert them into a PostgreSQL table
80 | '''
81 | if not os.path.exists(filename):
82 | raise RuntimeError("error: cannot find "+filename)
83 |
84 | root = etree.parse(filename)
85 |
86 | # Loop over the buildings
87 | for building in root.iter(insert_namespace('Building', root)):
88 | bldg_id = building.get("{http://www.opengis.net/gml}id")
89 | bldg_bin = ""
90 |
91 | # Loop over string attributes to get BIN
92 | for attribute in building.iter("{http://www.opengis.net/citygml/generics/1.0}stringAttribute"):
93 | if attribute.attrib.get("name") == "BIN":
94 | bldg_bin = attribute[0].text
95 |
96 | # Some files use 2.0 for some reason...
97 | for attribute in building.iter("{http://www.opengis.net/citygml/generics/2.0}stringAttribute"):
98 | if attribute.attrib.get("name") == "BIN":
99 | bldg_bin = attribute[0].text
100 |
101 | # Get the polygons for this building
102 |         all_polys = [polygon_to_wkt(p) for p in building.iter(insert_namespace('Polygon', building))]
103 |         polys = [p for p in all_polys if p is not None]  # drop degenerate polygons
104 |         if polys:
105 | sql = "INSERT INTO {} (geom, bldg_id, bldg_bin) VALUES ('SRID=2263; MULTIPOLYGON({})'::geometry, '{}', '{}');".format(
106 | table_name, ','.join(polys), bldg_id, bldg_bin)
107 | print(sql)
108 | else:
109 | sys.stderr.write( 'degenerate building geometry gml:id={}'.format(bldg_id))
110 |
111 | if __name__ == '__main__':
112 | gml = sys.argv[1]
113 | table_name = sys.argv[2]
114 | sys.stderr.write("converting {}\n".format(gml))
115 |     run_psql(gml, table_name)
116 |
117 |
--------------------------------------------------------------------------------
/scripts/import-spreadsheet.js:
--------------------------------------------------------------------------------
1 | require("dotenv").config();
2 | const fetch = require("node-fetch");
3 | const { isAfter } = require("date-fns");
4 | const { insertBulk, performQuery } = require("./db");
5 |
6 | importSpreadsheet().then(() => process.exit(0));
7 |
8 | async function importSpreadsheet() {
9 | console.log("Fetching spreadsheet...");
10 | const spreadsheetRes = await fetch(process.env.SPREADSHEET_URL);
11 | const { nodes: rawNodes, links, sectors } = await spreadsheetRes.json();
12 | const nodes = rawNodes
13 | .filter((node) => node.address)
14 | .map((node) => ({
15 | ...node,
16 | memberEmail: (node.memberEmail || "").toLowerCase().trim(),
17 | }));
18 |
19 | console.log("Importing spreadsheet...");
20 | await importBuildings(nodes);
21 | await importMembers(nodes);
22 | await importNodes(nodes);
23 | await importJoinRequests(nodes);
24 | await importDevices(sectors);
25 | await importLinks(links);
26 | await importAppointments();
27 | }
28 |
29 | async function importBuildings(nodes) {
30 | const clusteredNodes = getClusteredNodes(nodes);
31 | return insertBulk(
32 | "buildings",
33 | ["address", "lat", "lng", "alt", "notes", "bin"],
34 | clusteredNodes.filter((cluster) => cluster[0].address),
35 | (cluster) => {
36 | const { id, address, coordinates, notes, bin } = cluster[0]; // TODO: Something better than first node
37 | const [lng, lat, alt] = coordinates;
38 | return [
39 | String(address).slice(0, 256),
40 | parseFloat(lat),
41 | parseFloat(lng),
42 | parseFloat(alt),
43 | notes ? String(notes) : null,
44 | bin,
45 | ];
46 | }
47 | );
48 | }
49 |
50 | async function importNodes(nodes) {
51 | const buildings = await performQuery("SELECT * FROM buildings");
52 | const buildingsByAddress = buildings.reduce((acc, cur) => {
53 | acc[cur.address] = cur;
54 | return acc;
55 | }, {});
56 |
57 | const buildingsByNodeAddress = {};
58 | const clusteredNodes = getClusteredNodes(nodes);
59 | clusteredNodes.forEach((cluster) => {
60 | const firstNode = cluster[0];
61 | const clusterBuilding = buildingsByAddress[firstNode.address];
62 | cluster.forEach((node) => {
63 | buildingsByNodeAddress[node.address] = clusterBuilding;
64 | });
65 | });
66 |
67 | const members = await performQuery("SELECT * FROM members");
68 | const membersMap = members.reduce((acc, cur) => {
69 | acc[cur.email] = cur;
70 | return acc;
71 | }, {});
72 |
73 | const actualNodes = nodes.filter(
74 | (node) =>
75 | node.status === "Installed" ||
76 | node.status === "Abandoned" ||
77 | node.status === "Unsubscribe" ||
78 | node.status === "NN Assigned"
79 | );
80 | const installedNodes = actualNodes.filter((node) => node.installDate);
81 | const validNodes = installedNodes.filter(
82 | (node) => node.address && buildingsByNodeAddress[node.address]
83 | );
84 |
85 | let maxNodeId = 1;
86 | await insertBulk(
87 | "nodes",
88 | [
89 | "id",
90 | "lat",
91 | "lng",
92 | "alt",
93 | "status",
94 | "location",
95 | "name",
96 | "notes",
97 | "create_date",
98 | "abandon_date",
99 | "building_id",
100 | ],
101 | validNodes,
102 | (node) => {
103 | if (!node.memberEmail || !membersMap[node.memberEmail.toLowerCase()]) {
104 | console.log("Node", node.id, "not found");
105 | }
106 | if (node.status !== "Installed" && !node.abandonDate) {
107 | console.log("Added abandon date to ", node.id);
108 | console.log(node.id, node.status);
109 | node.abandonDate = node.installDate;
110 | }
111 | maxNodeId = Math.max(maxNodeId, node.id);
112 | return [
113 | node.id,
114 | node.coordinates[1],
115 | node.coordinates[0],
116 | node.coordinates[2],
117 | node.status === "Installed" ? "active" : "inactive",
118 | node.address,
119 | node.name,
120 | node.notes,
121 | new Date(node.installDate),
122 | node.abandonDate ? new Date(node.abandonDate) : null,
123 | buildingsByNodeAddress[node.address].id,
124 | ];
125 | }
126 | );
127 | await performQuery(
128 | `ALTER SEQUENCE nodes_id_seq RESTART WITH ${maxNodeId + 1}`
129 | );
130 |
131 | await insertBulk(
132 | "memberships",
133 | ["node_id", "member_id"],
134 | validNodes,
135 | (node) => [node.id, membersMap[node.memberEmail.toLowerCase()].id]
136 | );
137 | }
138 |
139 | async function importDevices(devices) {
140 | const deviceTypeMap = devices.reduce((acc, cur) => {
141 | acc[cur.device] = {
142 | name: cur.device,
143 | range: cur.radius,
144 | width: cur.width,
145 | };
146 | return acc;
147 | }, {});
148 |
149 | deviceTypeMap["Unknown Device Type"] = {
150 | name: "Unknown Device Type",
151 | range: 0,
152 | width: 0,
153 | };
154 | deviceTypeMap["SXTsq 5 ac"] = { name: "SXTsq 5 ac", range: 0.3, width: 0 };
155 | deviceTypeMap["LiteBeam AC"] = { name: "LiteBeam AC", range: 2, width: 0 };
156 |
157 | const deviceTypes = Object.values(deviceTypeMap);
158 | await insertBulk(
159 | "device_types",
160 | ["name", "range", "width"],
161 | deviceTypes.filter((type) => {
162 | if (!type.name || !type.range || !type.width) {
163 | console.log(`Invalid device type:`, type);
164 | // return false;
165 | }
166 | return true;
167 | }),
168 | (type) => [type.name, type.range, type.width]
169 | );
170 |
171 | const dbDeviceTypes = await performQuery("SELECT * FROM device_types");
172 | const dbDeviceTypeMap = dbDeviceTypes.reduce((acc, cur) => {
173 | acc[cur.name] = cur;
174 | return acc;
175 | }, {});
176 |
177 | const nodes = await performQuery("SELECT * FROM nodes");
178 | const nodesMap = nodes.reduce((acc, cur) => {
179 | acc[cur.id] = cur;
180 | return acc;
181 | }, {});
182 |
183 | devices.forEach((device, index) => {
184 | const node = nodesMap[device.nodeId];
185 |
186 | if (!node) {
187 | console.log("No node for device", device);
188 | return;
189 | }
190 |
191 | if (device.device === "Omni" && node.status === "active") {
192 | devices[index].status = "active";
193 | } else if (node.status === "active" && device.status !== node.status) {
194 | console.log("mismatched status", device, node);
195 | }
196 | });
197 |
198 | // Add devices for nodes with no devices
199 | const nodeDevicesMap = devices.reduce((acc, cur) => {
200 | acc[cur.nodeId] = cur;
201 | return acc;
202 | }, {});
203 |
204 | const unknownDevices = nodes.reduce((acc, cur) => {
205 | if (!nodeDevicesMap[cur.id]) {
206 | const lowerNotes = (cur.notes || "").toLowerCase();
207 | let isUnknown = true;
208 | const isOmni = lowerNotes.includes("omni");
209 | const isSxt = lowerNotes.includes("sxt") || lowerNotes.includes("kiosk");
210 | const isLbe =
211 | lowerNotes.includes("lbe") || lowerNotes.includes("litebeam");
212 | // const isMoca = lowerNotes.includes("moca");
213 | // const isR6 = lowerNotes === "r6";
214 | // const isVPN = lowerNotes.includes("rem") || lowerNotes.includes("vpn");
215 | // const isNp7r = lowerNotes.includes("netPower Lite 7R");
216 |
217 | function addDevice(deviceName) {
218 | acc.push({
219 | status: cur.status,
220 | device: deviceName,
221 | nodeId: cur.id,
222 | });
223 | }
224 |
225 | if (isOmni) {
226 | addDevice("Omni");
227 | isUnknown = false;
228 | }
229 | if (isSxt) {
230 | addDevice("SXTsq 5 ac");
231 | isUnknown = false;
232 | }
233 | if (isLbe) {
234 | addDevice("LiteBeam AC");
235 | isUnknown = false;
236 | }
237 |
238 | if (isUnknown) {
239 | addDevice("Unknown Device Type");
240 | }
241 | }
242 | return acc;
243 | }, []);
244 |
245 | const allDevices = [
246 | ...devices.filter((device) => nodesMap[device.nodeId]), // Only import devices on nodes
247 | ...unknownDevices,
248 | ];
249 |
250 | // Import devices
251 | await insertBulk(
252 | "devices",
253 | [
254 | "status",
255 | "name",
256 | "ssid",
257 | "notes",
258 | "create_date",
259 | "abandon_date",
260 | "device_type_id",
261 | "node_id",
262 | "lat",
263 | "lng",
264 | "alt",
265 | "azimuth",
266 | ],
267 | allDevices,
268 | (device) => {
269 | const deviceNode = nodesMap[device.nodeId];
270 | if (device.status === "abandoned") {
271 | device.status = "inactive";
272 | }
273 | let actualStatus = device.status;
274 | if (deviceNode.status === "abandoned") {
275 | actualStatus = "inactive";
276 | }
277 | if (!device.status) {
278 | console.log(device);
279 | }
280 | return [
281 | actualStatus,
282 | device.name,
283 | device.ssid,
284 | device.notes,
285 | device.installDate
286 | ? new Date(device.installDate)
287 | : deviceNode.create_date,
288 | device.abandonDate
289 | ? new Date(device.abandonDate)
290 | : deviceNode.abandon_date,
291 | dbDeviceTypeMap[device.device].id,
292 | device.nodeId,
293 | deviceNode.lat,
294 | deviceNode.lng,
295 | deviceNode.alt,
296 | device.azimuth || 0,
297 | ];
298 | }
299 | );
300 | }
301 |
302 | const getDevicesQuery = `SELECT
303 | devices.*,
304 | device_types.name AS type
305 | FROM
306 | devices
307 | JOIN device_types ON device_types.id = devices.device_type_id`;
308 |
309 | async function importLinks(links) {
310 | const devices = await performQuery(getDevicesQuery);
311 | const devicesMap = devices.reduce((acc, cur) => {
312 | if (cur.type === "Unknown Device Type" && acc[cur.node_id]) return acc;
313 | acc[cur.node_id] = cur;
314 | return acc;
315 | }, {});
316 |
317 | await insertBulk(
318 | "links",
319 | ["device_a_id", "device_b_id", "status", "create_date"],
320 | links.filter((link) => link.status === "active"),
321 | (link) => {
322 | const deviceA = devicesMap[link.from];
323 | const deviceB = devicesMap[link.to];
324 | if (!deviceA || !deviceB) {
325 | throw new Error(`Device not found for node ${link.from} or ${link.to}`);
326 | }
327 | const create_date = isAfter(deviceA.create_date, deviceB.create_date)
328 | ? deviceA.create_date
329 | : deviceB.create_date;
330 |
331 | let actualStatus = link.status;
332 | if (deviceA.status !== "active" || deviceB.status !== "active") {
333 | actualStatus = "inactive";
334 | }
335 |
336 | return [deviceA.id, deviceB.id, actualStatus, create_date];
337 | }
338 | );
339 | }
340 |
341 | async function importMembers(nodes) {
342 | const emailMap = {};
343 |
344 | // Cluster nodes by email
345 | for (let i = 0; i < nodes.length; i++) {
346 | const node = nodes[i];
347 | const lowerEmail = (node.memberEmail || "").toLowerCase();
348 | emailMap[lowerEmail] = emailMap[lowerEmail] || [];
349 | emailMap[lowerEmail].push(node);
350 | }
351 |
352 | // Use first node from each cluster + filter nodes with missing member info
353 | const clusteredNodes = Object.values(emailMap)
354 | .map((email) => email[0]) // TODO: Something better
355 | .filter((node) => {
356 | if (!node.memberEmail) {
357 | console.log(`Node ${node.id} missing email`);
358 | return false;
359 | }
360 | return true;
361 | });
362 |
363 | await insertBulk(
364 | "members",
365 | ["name", "email", "phone"],
366 | clusteredNodes,
367 | (node) => [
368 | node.memberName,
369 | node.memberEmail.toLowerCase(),
370 | node.memberPhone,
371 | ]
372 | );
373 | }
374 |
375 | async function importJoinRequests(nodes) {
376 | const buildings = await performQuery("SELECT * FROM buildings");
377 | const buildingsByAddress = buildings.reduce((acc, cur) => {
378 | acc[cur.address] = cur;
379 | return acc;
380 | }, {});
381 |
382 | const members = await performQuery("SELECT * FROM members");
383 | const membersByEmail = members.reduce((acc, cur) => {
384 | acc[cur.email] = cur;
385 | return acc;
386 | }, {});
387 |
388 | const panoNodesRes = await fetch("https://node-db.netlify.com/nodes.json");
389 | const panoNodes = await panoNodesRes.json();
390 | const panoNodesMap = panoNodes.reduce((acc, cur) => {
391 | acc[cur.id] = cur;
392 | return acc;
393 | }, {});
394 |
395 | const nodesWithIDs = [];
396 | const clusteredNodes = getClusteredNodes(nodes);
397 |
398 | for (var i = 0; i < clusteredNodes.length; i++) {
399 | const cluster = clusteredNodes[i];
400 | const firstNode = cluster[0];
401 |
402 | if (!firstNode.address || !firstNode.memberEmail) {
403 | console.log(`Invalid join request: Node ${firstNode.id}`);
404 | continue;
405 | }
406 |
407 | const building = buildingsByAddress[firstNode.address];
408 | if (!building) {
409 | console.log(`Building not found: ${firstNode.address}`);
410 | continue;
411 | }
412 |
413 | for (var j = 0; j < cluster.length; j++) {
414 | const node = cluster[j];
415 | const member = membersByEmail[(node.memberEmail || "").toLowerCase()];
416 | if (!member) {
417 | console.log(
418 | `Member not found: ${node.id} ${(
419 | node.memberEmail || "no email"
420 | ).toLowerCase()}`
421 | );
422 | continue;
423 | }
424 | nodesWithIDs.push({
425 | ...node,
426 | buildingId: building.id,
427 | memberId: member.id,
428 | });
429 | }
430 | }
431 |
432 | let maxRequestId = 1;
433 | await insertBulk(
434 | "requests",
435 | [
436 | "id",
437 | "status",
438 | "apartment",
439 | "date",
440 | "roof_access",
441 | "building_id",
442 | "member_id",
443 | ],
444 | nodesWithIDs.filter((node) => {
445 | if (!node.requestDate) {
446 | console.log(`Node ${node.id} missing request date`);
447 | return false;
448 | }
449 | maxRequestId = Math.max(maxRequestId, node.id);
450 | return true;
451 | }),
452 | (node) => {
453 | const status =
454 | node.status === "Installed"
455 | ? "installed"
456 | : node.status === "Abandoned" ||
457 | node.status === "Unsubscribe" ||
458 | node.status === "Not Interested" ||
459 | node.status === "No Reply" ||
460 | node.status === "Invalid" ||
461 | node.status === "Dupe"
462 | ? "closed"
463 | : "open";
464 |
465 | if (node.apartment && node.apartment.length > 200) {
466 | console.log("too long", node);
467 | node.apartment = node.apartment.substring(0, 200);
468 | }
469 | return [
470 | node.id,
471 | status,
472 | node.apartment,
473 | new Date(node.requestDate),
474 | node.roofAccess,
475 | node.buildingId,
476 | node.memberId,
477 | ];
478 | }
479 | );
480 | await performQuery(
481 | `ALTER SEQUENCE requests_id_seq RESTART WITH ${maxRequestId + 1}`
482 | );
483 |
484 | const joinRequests = await performQuery("SELECT * FROM requests");
485 | const joinRequestsByDate = joinRequests.reduce((acc, cur) => {
486 | acc[parseInt(cur.date.getTime() / 1000)] = cur;
487 | return acc;
488 | }, {});
489 |
490 | const panoramas = panoNodes
491 | .filter((node) => node.panoramas)
492 | .reduce((acc, cur) => {
493 | const curDate = new Date(cur.requestDate);
494 | const joinRequest =
495 | joinRequestsByDate[parseInt(curDate.getTime() / 1000)];
496 | if (!joinRequest) {
497 | console.log(
498 | "Join request not found",
499 | cur.id,
500 | parseInt(curDate.getTime() / 1000)
501 | );
502 | return acc;
503 | }
504 | acc.push(
505 | ...cur.panoramas.map((file) => ({
506 | url: `https://node-db.netlify.com/panoramas/${file}`,
507 | date: new Date(cur.requestDate), // Should be date submitted
508 | joinRequestId: joinRequest.id,
509 | }))
510 | );
511 | return acc;
512 | }, []);
513 |
514 | await insertBulk(
515 | "panoramas",
516 | ["url", "date", "request_id"],
517 | panoramas,
518 | (panorama) => [panorama.url, panorama.date, panorama.joinRequestId]
519 | );
520 | }
521 |
522 | async function importPanoramas(nodes) {
523 | const panoramas = nodes
524 | .filter((node) => node.panoramas)
525 | .reduce((acc, cur) => {
526 | acc.push(
527 | ...cur.panoramas.map((file) => ({
528 | url: `https://node-db.netlify.com/panoramas/${file}`,
529 | date: new Date(cur.requestDate), // Should be date submitted
530 | }))
531 | );
532 | return acc;
533 | }, []);
534 | return await insertBulk(
535 | "panoramas",
536 | ["url", "date"],
537 | panoramas,
538 | (panorama) => [panorama.url, panorama.date]
539 | );
540 | }
541 |
542 | async function importAppointments() {
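  // Fetch recent appointments from Acuity; the API authenticates with HTTP Basic
  // auth using the account's user ID and API key.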
543 | const appointmentsRes = await fetch(
544 | `https://acuityscheduling.com/api/v1/appointments?max=25&calendarID=${process.env.ACUITY_CALENDAR_ID}`,
545 | {
546 | headers: {
547 | Authorization: `Basic ${Buffer.from(
548 | `${process.env.ACUITY_USER_ID}:${process.env.ACUITY_API_KEY}`
549 | ).toString("base64")}`,
550 | },
551 | }
552 | );
553 | const appointments = await appointmentsRes.json();
554 |
555 | const newAppointments = [];
556 | for (var i = 0; i < appointments.length; i++) {
557 | const appointment = appointments[i];
558 | const { id, email, phone, type, datetime } = appointment;
559 |
560 |     // Search all forms for values
561 | let nodeId, address, notes;
562 | appointment.forms.forEach((form) => {
563 | nodeId =
564 | (
565 | form.values.filter(
566 | (value) =>
567 | value.name === "Node Number" || value.name === "Request Number"
568 | )[0] || {}
569 | ).value || nodeId;
570 |
571 | address =
572 | (
573 | form.values.filter(
574 | (value) => value.name === "Address and Apartment #"
575 | )[0] || {}
576 | ).value || address;
577 |
578 | notes =
579 | (form.values.filter((value) => value.name === "Notes")[0] || {})
580 | .value || notes;
581 | });
582 |
583 | // Get Member
584 | let [member] = await performQuery(
585 | "SELECT * FROM members WHERE email = $1",
586 | [email.replace(/\n/g, "")]
587 | );
588 |
589 | if (!member) {
590 | [member] = await performQuery("SELECT * FROM members WHERE phone = $1", [
591 | phone,
592 | ]);
593 | if (!member) continue;
594 | }
595 |
596 | if (!nodeId) {
597 | console.log("Missing node id", appointment);
598 | continue;
599 | }
600 |
601 |     // Get Building
602 | const buildings = await performQuery(
603 | `SELECT
604 | buildings.*
605 | FROM
606 | members
607 | JOIN requests ON requests.member_id = members.id
608 | JOIN buildings ON buildings.id = requests.building_id
609 | WHERE
610 | members.email = $1
611 | GROUP BY buildings.id`,
612 | [member.email]
613 | );
614 |
615 | let building = {};
616 | if (!buildings.length) {
617 | console.log(email);
618 | } else if (buildings.length > 1) {
619 | const [buildingNumber] = address.split(" ");
620 | const matchingAddress = buildings.filter(
621 | (b) => b.address.split(" ")[0] === buildingNumber
622 | );
623 | if (matchingAddress.length) {
624 | building = matchingAddress[0];
625 | }
626 | } else {
627 | building = buildings[0];
628 | }
629 |
630 | if (!building) {
631 | building = {};
632 | }
633 |
634 | let request = {};
635 | // Handle weird new format: 1234 (NN: 321)
636 | let sanitizedNodeId = parseInt(nodeId.split(" ")[0].replace(/[^0-9]/g, ""));
637 | sanitizedNodeId = Number.isInteger(sanitizedNodeId)
638 | ? sanitizedNodeId
639 | : null;
640 | if (sanitizedNodeId > 100000) sanitizedNodeId = null;
641 | if (sanitizedNodeId) {
642 | const [
643 | memberBuildingRequest,
644 | ] = await performQuery(
645 | "SELECT * FROM requests WHERE member_id = $1 AND building_id = $2",
646 | [member.id, building.id]
647 | );
648 | request = memberBuildingRequest;
649 | if (!request) {
650 | const [
651 | dbRequest,
652 | ] = await performQuery("SELECT * FROM requests WHERE id = $1", [
653 | sanitizedNodeId,
654 | ]);
655 | request = dbRequest || request;
656 | }
657 | }
658 |
659 | // Get node
660 | let node_id;
661 | if (request) {
662 | const [
663 | node,
664 | ] = await performQuery("SELECT * FROM nodes WHERE building_id = $1", [
665 | request.building_id,
666 | ]);
667 | if (node) {
668 | node_id = node.id;
669 | }
670 | }
671 |
672 | const typeMap = {
673 | Install: "install",
674 | Support: "support",
675 | "Site survey": "survey",
676 | "all day": "install",
677 | };
678 | const dbType = typeMap[type];
679 | newAppointments.push([
680 | dbType,
681 | datetime,
682 | notes,
683 | id,
684 | member.id,
685 | building.id,
686 |       (request && request.id) || sanitizedNodeId,
687 | node_id,
688 | ]);
689 | }
690 |
691 | console.log(newAppointments);
692 |
693 | await insertBulk(
694 | "appointments",
695 | [
696 | "type",
697 | "date",
698 | "notes",
699 | "acuity_id",
700 | "member_id",
701 | "building_id",
702 | "request_id",
703 | "node_id",
704 | ],
705 | newAppointments,
706 | (appointment) => appointment
707 | );
708 | }
709 |
710 | function getClusteredNodes(nodes) {
711 | // Cluster nodes by reducing lat/lng precision
712 | const clusterMap = {};
713 | nodes.forEach((node) => {
714 | const key = geoKey(node);
715 | if (!key) return;
716 | clusterMap[key] = clusterMap[key] || [];
717 | clusterMap[key].push(node);
718 | });
719 | return Object.values(clusterMap);
720 |
721 | function geoKey(node) {
722 | const precision = 5;
723 | const [lng, lat] = node.coordinates;
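    // 5 decimal places is roughly 1 meter, so nodes within ~1 m share a key,
    // e.g. "40.71427--73.98928" (the double dash comes from the negative longitude).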
724 | return `${parseFloat(lat).toFixed(precision)}-${parseFloat(lng).toFixed(
725 | precision
726 | )}`;
727 | }
728 | }
729 |
--------------------------------------------------------------------------------
/scripts/import-stripe.js:
--------------------------------------------------------------------------------
1 | require("dotenv").config();
2 | const stripe = require("stripe")(process.env.STRIPE_SECRET_KEY);
3 |
4 | const { performQuery } = require("./db.js");
5 |
6 | importStripe();
7 |
8 | async function importStripe() {
9 | const customers = await getCustomers();
10 | const customersById = {};
11 |   for (const customer of customers) {
12 | customersById[customer.id] = customer;
13 | await setStripeId(customer.email, customer.id);
14 | }
15 |
16 | const subscriptions = await getSubscriptions();
17 | const validSubs = subscriptions.filter((s) =>
18 | ["active", "trialing"].includes(s.status)
19 | );
20 |
21 |   for (const sub of validSubs) {
22 | const customer = customersById[sub.customer];
23 | if (!customer) {
24 | console.log("missing customer", sub);
25 | } else {
26 | await setMemberAsDonor(customer.email);
27 | // console.log(customer.email);
28 | }
29 | }
30 | }
31 |
32 | async function getSubscriptions() {
33 | const allSubscriptions = [];
34 | let hasMore = true;
35 | let startingAfter = null;
36 | while (hasMore) {
37 | console.log("Fetching subscriptions...");
38 | const params = {
39 | limit: 100,
40 | };
41 | if (startingAfter) {
42 | params.starting_after = startingAfter;
43 | }
44 | const subscriptions = await stripe.subscriptions.list(params);
45 | allSubscriptions.push(...subscriptions.data);
46 | hasMore = subscriptions.has_more;
47 | startingAfter = subscriptions.data[subscriptions.data.length - 1].id;
48 | }
49 | return allSubscriptions;
50 | }
51 |
52 | async function getCustomers() {
53 | const allCustomers = [];
54 | let hasMore = true;
55 | let startingAfter = null;
56 | while (hasMore) {
57 | console.log("Fetching customers...");
58 | const params = {
59 | limit: 100,
60 | };
61 | if (startingAfter) {
62 | params.starting_after = startingAfter;
63 | }
64 | const customers = await stripe.customers.list(params);
65 | allCustomers.push(...customers.data);
66 | hasMore = customers.has_more;
67 | startingAfter = customers.data[customers.data.length - 1].id;
68 | }
69 | return allCustomers;
70 | }
71 |
72 | async function setStripeId(email, id) {
73 | await performQuery(
74 | "UPDATE members SET stripe_customer_id = $2 WHERE email = $1",
75 | [email, id]
76 | );
77 | }
78 |
79 | async function setMemberAsDonor(email) {
80 | await performQuery("UPDATE members SET donor = true WHERE email = $1", [
81 | email,
82 | ]);
83 | }
84 |
--------------------------------------------------------------------------------
/scripts/los.js:
--------------------------------------------------------------------------------
1 | const fetch = require("node-fetch");
2 | require("dotenv").config();
3 | const { performQuery } = require("./db");
4 | const ProgressBar = require("./ProgressBar");
5 |
6 | checkLOS().then(() => process.exit(0));
7 |
8 | let bar;
9 | let processed = 0;
10 | let notFound = 0;
11 | let total = 0;
12 |
13 | async function checkLOS() {
14 | const requests = await getRequests();
15 | bar = new ProgressBar(requests.length);
16 | bar.render();
17 |
18 | for (var i = requests.length - 1; i >= 0; i--) {
19 | const request = requests[i];
20 | await handleRequest(request);
21 | bar.curr = ++total;
22 | bar.render();
23 | }
24 |
25 | console.log("\n");
26 | console.log(`${processed} buildings processed`);
27 | console.log(`${notFound} buildings not found`);
28 | }
29 |
30 | async function handleRequest(request) {
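  // Skip requests we can't or don't need to check: missing or placeholder BINs,
  // no roof access, buildings that already have an omni or sector device, and a
  // few hard-coded request IDs.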
31 | let skip = false;
32 | if (!request.bin) skip = true;
33 | if (!request.roof_access) skip = true;
34 | if (request.bin < 0 || request.bin % 1000000 === 0) skip = true;
35 | if (
36 | request.device_types.filter(
37 | (device_type) =>
38 | device_type &&
39 | ["Omni", "LBE120", "SN1Sector1", "SN1Sector2"].indexOf(
40 | device_type.name
41 | ) > -1
42 | ).length
43 | )
44 | skip = true;
45 | if ([3946, 1932, 1933].indexOf(request.id) > -1) skip = true;
46 | if (skip) {
47 | processed++;
48 | return;
49 | }
50 |
51 | const url = `http://localhost:9000/v1/los?bin=${request.bin}`;
52 | const losResponse = await fetch(url);
53 | const {
54 | visibleOmnis,
55 | visibleSectors,
56 | visibleRequests,
57 | error,
58 | } = await losResponse.json();
59 | if (error) {
60 | if (error === "Not found") {
61 | notFound++;
62 | return;
63 | }
64 | throw Error(error);
65 | } else {
66 | processed++;
67 | }
68 | }
69 |
70 | async function getRequests() {
71 | return performQuery(
72 | `SELECT
73 | requests.*,
74 | buildings.bin,
75 | buildings.lat,
76 | buildings.lng,
77 | buildings.alt,
78 | json_agg(json_build_object('id', panoramas.id, 'url', panoramas.url, 'date', panoramas.date)) AS panoramas,
79 | json_agg(device_types.*) AS device_types
80 | FROM
81 | requests
82 | LEFT JOIN buildings ON requests.building_id = buildings.id
83 | LEFT JOIN panoramas ON requests.id = panoramas.request_id
84 | LEFT JOIN nodes ON nodes.building_id = buildings.id
85 | LEFT JOIN devices ON devices.node_id = nodes.id
86 | LEFT JOIN device_types ON devices.device_type_id = device_types.id
87 | WHERE
88 | requests.status = 'open'
89 | GROUP BY
90 | requests.id,
91 | buildings.id
92 | ORDER BY
93 | requests.id`
94 | );
95 | }
96 |
--------------------------------------------------------------------------------
/scripts/reset-los-db.js:
--------------------------------------------------------------------------------
1 | require("dotenv").config();
2 | const { Pool } = require("pg");
3 | const url = require("url");
4 |
5 | createTables().then(() => process.exit(0));
6 |
7 | async function createTables() {
8 | const pool = createPool(process.env.LOS_DATABASE_URL);
9 | await performQuery(pool, "CREATE EXTENSION IF NOT EXISTS postgis");
10 | await performQuery(pool, "CREATE EXTENSION IF NOT EXISTS postgis_sfcgal");
11 | await performQuery(pool, "DROP TABLE IF EXISTS buildings");
12 | await performQuery(
13 | pool,
14 | "CREATE TABLE IF NOT EXISTS buildings(gid SERIAL PRIMARY KEY, bldg_id varchar(255), bldg_bin varchar(255), geom GEOMETRY('MULTIPOLYGONZ', 2263))"
15 | );
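  // SRID 2263 (NAD83 / New York Long Island, US feet) matches the SRID used by
  // scripts/gml_to_pgsql.py when generating INSERT statements for this table.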
16 | await performQuery(
17 | pool,
18 | "CREATE INDEX IF NOT EXISTS geom_index ON buildings USING GIST (geom)"
19 | );
20 | await performQuery(
21 | pool,
22 | "CREATE INDEX IF NOT EXISTS bin_index ON buildings (bldg_bin)"
23 | );
24 | }
25 |
26 | async function performQuery(pool, text, values) {
27 | const client = await pool.connect();
28 | const result = await client.query(text, values);
29 | client.release();
30 | return result.rows;
31 | }
32 |
33 | function createPool(connectionString) {
34 | const params = url.parse(connectionString);
35 | return new Pool({
36 | connectionString,
37 | ssl: sslOptions(params.hostname),
38 | });
39 |
40 | // See src/db/index.js
41 | function sslOptions(host) {
42 | if (host === "localhost" || host === "127.0.0.1") return false;
43 | return {
44 | rejectUnauthorized: false,
45 | mode: "require",
46 | };
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/src/auth/index.js:
--------------------------------------------------------------------------------
1 | import jwt from "jsonwebtoken";
2 | import jwksClient from "jwks-rsa";
3 |
4 | export async function checkAuth(headers) {
5 | if (!headers.authorization) throw new Error("Unauthorized");
6 |
7 | const [scheme, token] = headers.authorization.split(" ");
8 | if (!scheme || !token)
9 | throw new Error("Format is Authorization: Bearer [token]");
10 | const validScheme = /^Bearer$/i.test(scheme);
11 | if (!validScheme) throw new Error("Format is Authorization: Bearer [token]");
12 | if (!token) throw new Error("No authorization token was found.");
13 |
14 | return verifyToken(token);
15 | }
16 |
17 | async function verifyToken(token) {
18 | const client = jwksClient({
19 | cache: true,
20 | rateLimit: true,
21 | jwksRequestsPerMinute: 5,
22 | jwksUri: process.env.JWKS_URI,
23 | });
24 |
25 | function getKey(header, callback) {
26 | client.getSigningKey(header.kid, (err, key) => {
27 |       if (err) return callback(err);
28 |       callback(null, key.publicKey || key.rsaPublicKey);
29 | });
30 | }
31 |
32 |   // Wrap the callback API in a promise so callers can await the decoded token
33 |   return new Promise((resolve, reject) =>
34 |     jwt.verify(
35 |       token,
36 |       getKey,
37 |       {
38 |         audience: process.env.JWT_AUDIENCE,
39 |         issuer: process.env.JWT_ISSUER,
40 |         algorithms: ["RS256"],
41 |       },
42 |       (err, decoded) => (err ? reject(err) : resolve(decoded))
43 |     )
44 |   );
45 | }
46 |
--------------------------------------------------------------------------------
/src/db/appointments.js:
--------------------------------------------------------------------------------
1 | import { performQuery } from ".";
2 | import { getBuilding } from "./buildings";
3 | import { createNode } from "./nodes";
4 |
5 | export async function getAppointments() {
6 | const appointments = await performQuery(`SELECT
7 | appointments.*,
8 | to_json(buildings) AS building,
9 | to_json(requests) AS request,
10 | to_json(members) AS member
11 | FROM
12 | appointments
13 | JOIN
14 | buildings ON buildings.id = appointments.building_id
15 | JOIN
16 | requests ON requests.id = appointments.request_id
17 | JOIN
18 | members ON members.id = appointments.member_id
19 | ORDER BY
20 | appointments.date`);
21 | return appointments;
22 | }
23 |
24 | export async function getAppointment(id) {
25 | const [appointment] = await performQuery(
26 | `SELECT
27 | appointments.*,
28 | to_json(buildings) AS building,
29 | to_json(members) AS member,
30 | to_json(nodes) AS node,
31 | COALESCE(
32 | (
33 | SELECT
34 | jsonb_build_object(
35 | 'id', requests.id,
36 | 'status', requests.status,
37 | 'apartment', requests.apartment,
38 | 'date', requests.date,
39 | 'roof_access', requests.roof_access,
40 | 'member', to_json(members.*)
41 | )
42 | FROM
43 | requests
44 | JOIN
45 | members ON members.id = requests.member_id
46 | WHERE
47 | requests.id = appointments.request_id
48 | ),
49 | '[]'
50 | ) AS request
51 | FROM
52 | appointments
53 | LEFT JOIN buildings ON appointments.building_id = buildings.id
54 | LEFT JOIN members ON appointments.member_id = members.id
55 | LEFT JOIN nodes ON appointments.node_id = nodes.id
56 | WHERE
57 | appointments.id = $1
58 | GROUP BY
59 | appointments.id,
60 | buildings.id,
61 | members.id,
62 | nodes.id`,
63 | [id]
64 | );
65 | return appointment;
66 | }
67 |
68 | export async function getAppointmentByAcuityId(acuity_id) {
69 | const [
70 | idAppointment,
71 | ] = await performQuery(
72 | "SELECT id FROM appointments WHERE appointments.acuity_id = $1",
73 | [acuity_id]
74 | );
75 |   return getAppointment(idAppointment.id);
76 | }
77 |
78 | export async function createAppointment(appointment) {
79 | // TODO: Allocate node if none in building
80 | const building = await getBuilding(appointment.building_id);
81 | if (!building.nodes.length) {
82 | const node = await createNode({
83 | lat: building.lat,
84 | lng: building.lng,
85 | alt: building.alt,
86 | status: "potential",
87 | building_id: building.id,
88 | member_id: appointment.member_id,
89 | });
90 | appointment.node_id = node.id;
91 | } else {
92 | const [buildingNode] = building.nodes;
93 | appointment.node_id = buildingNode.id;
94 | }
95 |
96 | const [newAppointment] = await performQuery(
97 | `INSERT INTO appointments (type, date, notes, member_id, building_id, request_id, node_id, acuity_id)
98 | VALUES($1, $2, $3, $4, $5, $6, $7, $8)
99 | RETURNING
100 | *`,
101 | [
102 | appointment.type,
103 | appointment.date,
104 | appointment.notes,
105 | appointment.member_id,
106 | appointment.building_id,
107 | appointment.request_id,
108 | appointment.node_id,
109 | appointment.acuity_id,
110 | ]
111 | );
112 | return newAppointment;
113 | }
114 |
115 | export async function updateAppointment(appointment) {
116 | const [updatedAppointment] = await performQuery(
117 | `UPDATE
118 | appointments
119 | SET
120 | type = $2,
121 | date = $3,
122 | notes = $4,
123 | member_id = $5,
124 | building_id = $6,
125 | request_id = $7,
126 | acuity_id = $8,
127 | slack_ts = $9
128 | WHERE
129 | id = $1
130 | RETURNING
131 | *`,
132 | [
133 | appointment.id,
134 | appointment.type,
135 | appointment.date,
136 | appointment.notes,
137 | appointment.member_id,
138 | appointment.building_id,
139 | appointment.request_id,
140 | appointment.acuity_id,
141 | appointment.slack_ts,
142 | ]
143 | );
144 | return updatedAppointment;
145 | }
146 |
--------------------------------------------------------------------------------
/src/db/buildings.js:
--------------------------------------------------------------------------------
1 | import { performQuery } from ".";
2 |
3 | export async function getBuildings() {
4 | return performQuery(`SELECT
5 | buildings.*,
6 | JSON_AGG(DISTINCT nodes.*) AS nodes
7 | FROM
8 | buildings
9 | LEFT JOIN nodes ON nodes.building_id = buildings.id
10 | GROUP BY
11 | buildings.id
12 | ORDER BY
13 | COUNT(DISTINCT nodes.*) DESC`);
14 | }
15 |
16 | export async function getBuilding(id) {
17 | if (!Number.isInteger(parseInt(id, 10))) throw new Error("Bad params");
18 | const [building] = await performQuery(
19 | `SELECT
20 | buildings.*,
21 | COALESCE(
22 | (SELECT
23 | json_agg(
24 | json_build_object(
25 | 'id', nodes.id,
26 | 'lat', nodes.lat,
27 | 'lng', nodes.lng,
28 | 'status', nodes.status,
29 | 'name', nodes.name,
30 | 'notes', nodes.notes,
31 | 'devices', COALESCE
32 | (
33 | (
34 | SELECT
35 | json_agg(
36 | json_build_object(
37 | 'id', devices.id,
38 | 'type', device_types,
39 | 'lat', devices.lat,
40 | 'lng', devices.lng,
41 | 'azimuth', devices.azimuth,
42 | 'status', devices.status
43 | )
44 | )
45 | FROM
46 | devices
47 | JOIN device_types ON device_types.id = devices.device_type_id
48 | WHERE devices.node_id = nodes.id
49 | ),
50 | '[]'
51 | )
52 | )
53 | )
54 | FROM
55 | nodes
56 | WHERE
57 | nodes.building_id = $1
58 | ),
59 | '[]'
60 | ) AS nodes,
61 | COALESCE(
62 | (
63 | SELECT
64 | JSON_AGG(
65 | DISTINCT jsonb_build_object(
66 | 'id', requests.id,
67 | 'status', requests.status,
68 | 'apartment', requests.apartment,
69 | 'date', requests.date,
70 | 'roof_access', requests.roof_access,
71 | 'member', TO_JSON(members.*)
72 | )
73 | )
74 | FROM
75 | requests
76 | JOIN
77 | members ON members.id = requests.member_id
78 | WHERE
79 | requests.building_id = $1
80 | ),
81 | '[]'
82 | ) AS requests,
83 | COALESCE(json_agg(panoramas ORDER BY panoramas.date DESC) FILTER (WHERE panoramas IS NOT NULL), '[]') AS panoramas
84 | FROM
85 | buildings
86 | LEFT JOIN nodes ON nodes.building_id = buildings.id
87 | LEFT JOIN requests ON requests.building_id = buildings.id
88 | LEFT JOIN panoramas ON panoramas.request_id = requests.id
89 | WHERE buildings.id = $1
90 | GROUP BY buildings.id`,
91 | [id]
92 | );
93 | if (!building) throw new Error("Not found");
94 | return building;
95 | }
96 |
97 | export async function updateBuilding(id, patch) {
98 |   const existingBuilding = await getBuilding(id);
99 |
100 | // TODO: Sanitize / validate new values!!
101 |
102 | const newBuilding = {
103 | ...existingBuilding,
104 | ...patch,
105 | };
106 |
107 | await performQuery(
108 | `UPDATE
109 | buildings
110 | SET
111 | address = $2,
112 | lat = $3,
113 | lng = $4,
114 | alt = $5,
115 | bin = $6,
116 | notes = $7
117 | WHERE
118 | id = $1
119 | RETURNING
120 | *`,
121 | [
122 | id,
123 | newBuilding.address,
124 | newBuilding.lat,
125 | newBuilding.lng,
126 | newBuilding.alt,
127 | newBuilding.bin,
128 | newBuilding.notes,
129 | ]
130 | );
131 | return getBuilding(id);
132 | }
133 |
--------------------------------------------------------------------------------
/src/db/devices.js:
--------------------------------------------------------------------------------
1 | import { performQuery } from ".";
2 |
3 | export async function getDevice(id) {
4 | const [device] = await performQuery(
5 | `SELECT
6 | devices.*,
7 | to_json(device_types) AS TYPE,
8 | to_json(nodes) AS node
9 | FROM
10 | devices
11 | JOIN device_types ON device_types.id = devices.device_type_id
12 | JOIN nodes ON nodes.id = devices.node_id
13 | WHERE
14 | devices.id = $1`,
15 | [id]
16 | );
17 | if (!device) throw new Error("Not found");
18 | return device;
19 | }
20 |
21 | export async function getDevices() {
22 | const devices = await performQuery(`SELECT
23 | devices.*,
24 | to_json(device_types) AS TYPE,
25 | to_json(nodes) AS node
26 | FROM
27 | devices
28 | JOIN device_types ON device_types.id = devices.device_type_id
29 | JOIN nodes ON nodes.id = devices.node_id`);
30 | return devices;
31 | }
32 |
33 | export async function createDevice({
34 | lat,
35 | lng,
36 | alt,
37 | azimuth,
38 | name,
39 | ssid,
40 | notes,
41 | device_type_id,
42 | node_id,
43 | }) {
44 | const status = "active";
45 | const create_date = new Date();
46 | return performQuery(
47 | `INSERT INTO devices (lat, lng, alt, azimuth, status, name, ssid, notes, create_date, device_type_id, node_id)
48 | VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
49 | RETURNING
50 | *`,
51 | [
52 | lat,
53 | lng,
54 | alt,
55 | azimuth,
56 | status,
57 | name,
58 | ssid,
59 | notes,
60 | create_date,
61 | device_type_id,
62 | node_id,
63 | ]
64 | );
65 | }
66 |
67 | export async function updateDevice(id, patch) {
68 | const device = await getDevice(id);
69 | const newDevice = {
70 | ...device,
71 | ...patch,
72 | };
73 | return performQuery(
74 | `UPDATE
75 | devices
76 | SET
77 | lat = $2,
78 | lng = $3,
79 | alt = $4,
80 | azimuth = $5,
81 | status = $6,
82 | name = $7,
83 | ssid = $8,
84 | notes = $9,
85 | 			abandon_date = $10
86 | WHERE
87 | id = $1
88 | RETURNING
89 | *`,
90 | [
91 | id,
92 | newDevice.lat,
93 | newDevice.lng,
94 | newDevice.alt,
95 | newDevice.azimuth,
96 | newDevice.status,
97 | newDevice.name,
98 | newDevice.ssid,
99 | newDevice.notes,
100 | newDevice.abandon_date,
101 | ]
102 | );
103 | }
104 |
105 | export async function deleteDevice(id) {
106 | const deviceLinks = await performQuery(
107 | `SELECT * FROM links WHERE links.device_a_id = $1 OR links.device_b_id = $1`,
108 | [id]
109 | );
110 | if (deviceLinks.length) throw new Error("Device has active links.");
111 | 	await getDevice(id); // throws "Not found" if the device does not exist
112 | return performQuery(`DELETE FROM devices WHERE id = $1 RETURNING *`, [id]);
113 | }
114 |
--------------------------------------------------------------------------------
/src/db/index.js:
--------------------------------------------------------------------------------
1 | import { Pool } from "pg";
2 | import url from "url";
3 |
4 | let pool;
5 | let losPool;
6 |
7 | async function createPool(connectionString) {
8 | const params = url.parse(connectionString);
9 | return new Pool({
10 | connectionString,
11 | ssl: sslOptions(params.hostname),
12 | });
13 | }
14 |
15 | // Hacky way to disable ssl when running locally
16 | // TODO: get ssl running locally
17 | // TODO: Figure out how to verify the key
18 | function sslOptions(host) {
19 | if (host === "localhost" || host === "127.0.0.1") return false;
20 | return {
21 | rejectUnauthorized: false,
22 | mode: "require",
23 | };
24 | }
25 |
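// Run a query against the main database (DATABASE_URL); the pool is created
// lazily on the first call. Typical usage elsewhere in this repo, with a
// hypothetical id value:
//   const [node] = await performQuery("SELECT * FROM nodes WHERE id = $1", [227]);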
26 | export async function performQuery(text, values) {
27 | 	if (!pool) {
28 | 		pool = await createPool(process.env.DATABASE_URL);
29 | 	}
30 | 	const client = await pool.connect();
31 | 	try {
32 | 		const result = await client.query(text, values);
33 | 		return result.rows;
34 | 	} finally {
35 | 		// Always release the client back to the pool, even if the query throws
36 | 		client.release();
37 | 	}
38 | }
39 | 
40 | export async function performLosQuery(text, values) {
41 | 	if (!losPool) {
42 | 		losPool = await createPool(process.env.LOS_DATABASE_URL);
43 | 	}
44 | 	const client = await losPool.connect();
45 | 	try {
46 | 		const result = await client.query(text, values);
47 | 		return result.rows;
48 | 	} finally {
49 | 		client.release();
50 | 	}
51 | }
52 | 
53 | export async function end() {
54 | 	if (pool) {
55 | 		await pool.end();
56 | 	}
57 | 
58 | 	if (losPool) {
59 | 		await losPool.end();
60 | 	}
61 | }
62 | 
--------------------------------------------------------------------------------
/src/db/links.js:
--------------------------------------------------------------------------------
1 | import { performQuery } from ".";
2 |
3 | export async function getLinks() {
4 | return performQuery(`SELECT
5 | links.*,
6 | json_agg(nodes) as nodes,
7 | json_agg(json_build_object('id', devices.id, 'type', device_types, 'lat', devices.lat, 'lng', devices.lng, 'alt', devices.alt, 'azimuth', devices.azimuth, 'status', devices.status, 'name', devices.name, 'ssid', devices.ssid, 'notes', devices.notes, 'create_date', devices.create_date, 'abandon_date', devices.abandon_date)) AS devices
8 | FROM
9 | links
10 | JOIN devices ON devices.id IN(links.device_a_id, links.device_b_id)
11 | JOIN device_types ON device_types.id = devices.device_type_id
12 | JOIN nodes ON nodes.id = devices.node_id
13 | GROUP BY
14 | links.id`);
15 | }
16 |
17 | export async function getLink(id) {
18 | 	const [link] = await performQuery(
19 | `SELECT
20 | links.*,
21 | json_agg(nodes) as nodes,
22 | json_agg(json_build_object('id', devices.id, 'type', device_types, 'lat', devices.lat, 'lng', devices.lng, 'alt', devices.alt, 'azimuth', devices.azimuth, 'status', devices.status, 'name', devices.name, 'ssid', devices.ssid, 'notes', devices.notes, 'create_date', devices.create_date, 'abandon_date', devices.abandon_date)) AS devices
23 | FROM
24 | links
25 | JOIN devices ON devices.id IN(links.device_a_id, links.device_b_id)
26 | JOIN device_types ON device_types.id = devices.device_type_id
27 | JOIN nodes ON nodes.id = devices.node_id
28 | WHERE
29 | links.id = $1
30 | GROUP BY
31 | links.id`,
32 | [id]
33 | );
34 | if (!link) throw new Error("Not found");
35 | return link;
36 | }
37 |
38 | export async function createLink({ device_a_id, device_b_id }) {
39 | const [
40 | newLink,
41 | ] = await performQuery(
42 | `INSERT INTO links (device_a_id, device_b_id, status, create_date) VALUES($1, $2, $3, $4) RETURNING *`,
43 | [device_a_id, device_b_id, "active", new Date()]
44 | );
45 | return getLink(newLink.id);
46 | }
47 |
48 | export async function deleteLink({ id }) {
49 | await getLink(id);
50 | return performQuery(`DELETE FROM links WHERE id = $1 RETURNING *`, [id]);
51 | }
52 |
--------------------------------------------------------------------------------
/src/db/los.js:
--------------------------------------------------------------------------------
1 | import { performQuery, performLosQuery } from ".";
2 |
3 | const KM = 3280.84; // feet per kilometer (the LOS building geometries are in feet)
4 | const OMNI_RANGE = 0.75 * KM;
5 | const SECTOR_RANGE = 3 * KM;
6 | const REQUEST_RANGE = 3 * KM;
7 |
8 | export async function getLos(bin) {
9 | if (!bin) throw Error("Bad params");
10 |
11 | // TODO: One query, and use range of device
12 | const omnis = await getOmnis();
13 | const sectors = await getSectors();
14 | const requests = await getRequests();
15 |
16 | const building = await getBuildingFromBIN(bin);
17 | const buildingMidpoint = await getBuildingMidpoint(bin);
18 | const buildingHeight = await getBuildingHeight(bin);
19 | const buildingHeightMeters = await getBuildingHeightMeters(bin);
20 |
21 | 	const omnisInRange = await getNodesInRange(omnis, bin, OMNI_RANGE); // ~0.47 miles
22 | 	const sectorsInRange = await getNodesInRange(sectors, bin, SECTOR_RANGE); // ~1.9 miles
23 | const requestsInRange = await getNodesInRange(requests, bin, REQUEST_RANGE);
24 |
25 | // TODO: Dedupe code
26 | const visibleOmnis1 = [];
27 | await addVisible(omnisInRange, visibleOmnis1);
28 | const visibleOmnis = visibleOmnis1.filter((node) => node.bin !== bin);
29 |
30 | // TODO: Dedupe code
31 | let visibleSectors1 = [];
32 | await addVisible(sectorsInRange, visibleSectors1);
33 | const visibleSectors = visibleSectors1.filter((node) => node.bin !== bin);
34 |
35 | // TODO: Dedupe code
36 | const visibleRequests1 = [];
37 | await addVisible(requestsInRange, visibleRequests1);
38 | const visibleRequests = visibleRequests1.filter(
39 | (request) => request.bin !== bin
40 | );
41 |
42 | // Only save los if building is in db... for now
43 | if (building) {
44 | const allVisible = [...visibleOmnis, ...visibleSectors, ...visibleRequests];
45 | const saved = {};
46 | for (let j = 0; j < allVisible.length; j++) {
47 | const visibleNode = allVisible[j];
48 | if (!saved[visibleNode.building_id]) {
49 | await saveLOS(building, visibleNode);
50 | saved[visibleNode.building_id] = true;
51 | }
52 | }
53 | }
54 |
55 | return {
56 | buildingHeight: buildingHeightMeters,
57 | visibleOmnis,
58 | visibleSectors,
59 | visibleRequests,
60 | omnisInRange,
61 | sectorsInRange,
62 | };
63 |
64 | async function addVisible(nodes, visible) {
65 | for (let i = 0; i < nodes.length; i++) {
66 | const node = nodes[i];
67 | const { midpoint, alt } = node;
68 | if (parseInt(node.bin) % 1000000 === 0) continue; // Invalid bin
69 | 			const { coordinates } = JSON.parse(midpoint); // projected x/y in feet, despite the lat/lng names below
70 | 			const [lat, lng] = coordinates;
71 | 			const nodeMidpoint = [lat, lng];
72 | const nodeHeight = await getBuildingHeight(node.bin);
73 | const intersections = await getIntersections(
74 | buildingMidpoint,
75 | buildingHeight,
76 | bin,
77 | nodeMidpoint,
78 | nodeHeight,
79 | node.bin
80 | );
81 |
82 | if (!intersections.length) {
83 | const distance = await getDistance(buildingMidpoint, nodeMidpoint);
84 | visible.push({
85 | ...node,
86 | distance,
87 | });
88 | }
89 | }
90 | }
91 | }
92 |
93 | async function getBuildingMidpoint(bin) {
94 | const text =
95 | "SELECT ST_AsText(ST_Centroid((SELECT geom FROM buildings WHERE bldg_bin = $1)))";
96 | const values = [bin];
97 | const res = await performLosQuery(text, values);
98 | if (!res.length) throw new Error("Not found");
99 | const { st_astext } = res[0];
100 | if (!st_astext) throw new Error("Not found");
101 | const rawText = st_astext.replace("POINT(", "").replace(")", ""); // Do this better
102 | const [lat, lng] = rawText.split(" ");
103 | return [parseFloat(lat), parseFloat(lng)];
104 | }
105 |
106 | export async function getBuildingHeight(bin) {
107 | try {
108 | const text =
109 | "SELECT ST_ZMax((SELECT geom FROM buildings WHERE bldg_bin = $1))";
110 | const values = [bin];
111 | const res = await performLosQuery(text, values);
112 | if (!res.length) throw new Error("Not found");
113 | const { st_zmax } = res[0];
114 | const offset = 2;
115 | return parseInt(st_zmax) + offset;
116 | } catch (error) {
117 | console.log(error);
118 | return -1;
119 | }
120 | }
121 |
122 | export async function getBuildingHeightMeters(bin) {
123 | const buildingHeight = await getBuildingHeight(bin);
124 | const buildingHeightMeters = parseInt(buildingHeight * 0.3048);
125 | return buildingHeightMeters;
126 | }
127 |
128 | async function getNodesInRange(nodes, bin, range) {
129 | const nodeBins = nodes
130 | .map((node) => node.bin)
131 | .filter((bin) => bin % 1000000 !== 0);
132 | const losNodesInRange = await performLosQuery(
133 | `SELECT
134 | bldg_bin as bin,
135 | ST_AsGeoJSON(ST_Centroid(geom)) as midpoint
136 | FROM (
137 | SELECT
138 | *
139 | FROM
140 | buildings
141 | WHERE
142 | bldg_bin = ANY ($1)) AS hubs
143 | WHERE
144 | ST_DWithin (ST_Centroid(geom), (
145 | SELECT
146 | ST_Centroid(geom)
147 | FROM
148 | buildings
149 | WHERE
150 | bldg_bin = $2), $3)`,
151 | [nodeBins, bin, range]
152 | );
153 | const losNodesInRangeMap = losNodesInRange.reduce((acc, cur) => {
154 | acc[cur.bin] = cur;
155 | return acc;
156 | }, {});
157 |
158 | const nodesInRangeBins = losNodesInRange.map((node) => node.bin);
159 |
160 | const nodesInRange = nodes.filter((node) =>
161 | nodesInRangeBins.includes(String(node.bin))
162 | );
163 | const nodesInRangeWithMidpoint = nodesInRange.map((node) => ({
164 | ...node,
165 | midpoint: losNodesInRangeMap[node.bin].midpoint,
166 | }));
167 |
168 | return nodesInRangeWithMidpoint;
169 | }
170 |
171 | async function getIntersections(
172 | midpoint1,
173 | height1,
174 | bin1,
175 | midpoint2,
176 | height2,
177 | bin2
178 | ) {
179 | const [x1, y1] = midpoint1;
180 | const [x2, y2] = midpoint2;
181 | // const distance = await getDistance(midpoint1, midpoint2);
182 | // const FREQUENCY = 5; // GHz
183 | // const MILES_FEET = 5280;
184 | // const fresnelRadius =
185 | // 72.05 * Math.sqrt(distance / MILES_FEET / (4 * FREQUENCY));
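	// A building blocks line of sight if its 3D geometry crosses the straight
	// line drawn between the two rooftop midpoints (heights in feet, SRID 2263).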
186 | const text = `SELECT
187 | a.bldg_bin as bin
188 | FROM
189 | buildings AS a
190 | WHERE
191 | ST_3DIntersects (a.geom, ST_SetSRID(ST_GeomFromText('LINESTRINGZ(${x1} ${y1} ${height1}, ${x2} ${y2} ${height2})'), 2263))
192 | AND bldg_bin != $1
193 | AND bldg_bin != $2
194 | LIMIT 1`;
195 | const res = await performLosQuery(text, [bin1, bin2]);
196 | if (!res) throw new Error("Failed to get intersections");
197 | return res;
198 | }
199 |
200 | // TODO: 3D
201 | async function getDistance(point1, point2) {
202 | const [x1, y1] = point1;
203 | const [x2, y2] = point2;
204 | const text = `SELECT ST_Distance(
205 | 'POINT (${x1} ${y1})'::geometry,
206 | 'POINT (${x2} ${y2})'::geometry
207 | );`;
208 | const res = await performLosQuery(text);
209 | if (!res.length) throw new Error("Failed to calculate distance");
210 | const { st_distance } = res[0];
211 | const distanceFeet = st_distance;
212 | const distanceMeters = Math.round(distanceFeet * 0.3048);
213 | return distanceMeters;
214 | }
215 |
216 | async function getBuildingFromBIN(bin) {
217 | const [building] = await performQuery(
218 | `SELECT *
219 | FROM buildings
220 | WHERE bin = $1
221 | LIMIT 1`,
222 | [bin]
223 | );
224 | return building;
225 | }
226 |
227 | async function saveLOS(building, node) {
228 | return performQuery(
229 | "INSERT INTO los (building_a_id, building_b_id, lat_a, lng_a, alt_a, lat_b, lng_b, alt_b) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)",
230 | [
231 | building.id,
232 | node.building_id,
233 | building.lat,
234 | building.lng,
235 | building.alt,
236 | node.lat,
237 | node.lng,
238 | node.alt,
239 | ]
240 | );
241 | }
242 |
243 | async function getOmnis() {
244 | return performQuery(`SELECT
245 | nodes.id,
246 | nodes.name,
247 | nodes.status,
248 | buildings.bin,
249 | buildings.id as building_id,
250 | buildings.lat,
251 | buildings.lng,
252 | buildings.alt,
253 | json_agg(json_build_object('id', devices.id, 'type', device_types, 'lat', devices.lat, 'lng', devices.lng, 'alt', devices.alt, 'azimuth', devices.azimuth, 'status', devices.status, 'name', devices.name, 'ssid', devices.ssid, 'notes', devices.notes, 'create_date', devices.create_date, 'abandon_date', devices.abandon_date)) AS devices
254 | FROM
255 | nodes
256 | LEFT JOIN buildings ON nodes.building_id = buildings.id
257 | LEFT JOIN devices ON devices.node_id = nodes.id
258 | LEFT JOIN device_types ON devices.device_type_id = device_types.id
259 | WHERE
260 | device_types.name = 'Omni'
261 | AND devices.status = 'active'
262 | AND nodes.status = 'active'
263 | GROUP BY
264 | nodes.id,
265 | buildings.bin,
266 | buildings.id`);
267 | }
268 |
269 | async function getSectors() {
270 | return performQuery(`SELECT
271 | nodes.id,
272 | nodes.name,
273 | nodes.status,
274 | buildings.bin,
275 | buildings.id as building_id,
276 | buildings.lat,
277 | buildings.lng,
278 | buildings.alt,
279 | json_agg(json_build_object('id', devices.id, 'type', device_types, 'lat', devices.lat, 'lng', devices.lng, 'alt', devices.alt, 'azimuth', devices.azimuth, 'status', devices.status, 'name', devices.name, 'ssid', devices.ssid, 'notes', devices.notes, 'create_date', devices.create_date, 'abandon_date', devices.abandon_date)) AS devices
280 | FROM
281 | nodes
282 | LEFT JOIN buildings ON nodes.building_id = buildings.id
283 | LEFT JOIN devices ON devices.node_id = nodes.id
284 | LEFT JOIN device_types ON devices.device_type_id = device_types.id
285 | WHERE
286 | device_types.name IN ('LBE-120', 'SN1Sector1', 'SN1Sector2', 'Mikrotik120', 'LTU-60', 'PS-5AC')
287 | AND devices.status = 'active'
288 | AND nodes.status = 'active'
289 | GROUP BY
290 | nodes.id,
291 | buildings.bin,
292 | buildings.id`);
293 | }
294 |
295 | // Get hardcoded requests and requests with scheduled appointments
296 | // added Vernon 5916 as potential
297 | async function getRequests() {
298 | return performQuery(`SELECT
299 | requests.id,
300 | requests.status,
301 | buildings.id AS building_id,
302 | buildings.bin,
303 | buildings.address,
304 | buildings.lat,
305 | buildings.lng,
306 | buildings.alt
307 | FROM
308 | requests
309 | JOIN buildings ON requests.building_id = buildings.id
310 | LEFT JOIN appointments ON appointments.request_id = requests.id
311 | WHERE
312 | requests.id IN(5916)
313 | OR(
314 | SELECT
315 | id FROM appointments
316 | WHERE
317 | appointments.request_id = requests.id
318 | AND appointments.type = 'install'
319 | LIMIT 1) IS NOT NULL
320 | GROUP BY
321 | requests.id,
322 | buildings.bin,
323 | buildings.id,
324 | appointments.id`);
325 | }
326 |
--------------------------------------------------------------------------------
/src/db/map.js:
--------------------------------------------------------------------------------
1 | import { performQuery } from ".";
2 |
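// Builds the entire map payload (nodes, requests, links, appointments) as one
// JSON object in a single query, so the map endpoint is one database round trip.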
3 | export async function authorizedGetMap() {
4 | const results = await performQuery(`SELECT
5 | json_build_object(
6 | 'nodes',
7 | COALESCE(
8 | (
9 | SELECT
10 | json_agg(
11 | json_build_object(
12 | 'id', nodes.id,
13 | 'lat', nodes.lat,
14 | 'lng', nodes.lng,
15 | 'status', nodes.status,
16 | 'name', nodes.name,
17 | 'notes', nodes.notes,
18 | 'devices', COALESCE((
19 | SELECT
20 | json_agg(
21 | json_build_object(
22 | 'id', devices.id,
23 | 'type', device_types,
24 | 'lat', devices.lat,
25 | 'lng', devices.lng,
26 | 'azimuth', devices.azimuth,
27 | 'status', devices.status
28 | )
29 | )
30 | FROM
31 | devices
32 | LEFT JOIN device_types ON device_types.id = devices.device_type_id
33 | WHERE devices.node_id = nodes.id
34 | ),
35 | '[]'
36 | )
37 | )
38 | )
39 | FROM
40 | nodes
41 | ),
42 | '[]'
43 | ),
44 | 'requests',
45 | COALESCE(
46 | (
47 | SELECT
48 | json_agg(
49 | DISTINCT jsonb_build_object(
50 | 'id', requests.id,
51 | 'lat', buildings.lat,
52 | 'lng', buildings.lng,
53 | 'bin', buildings.bin,
54 | 'status', requests.status,
55 | 'roof_access', requests.roof_access,
56 | 'has_panoramas', panoramas IS NOT NULL
57 | )
58 | )
59 | FROM
60 | requests
61 | JOIN buildings ON buildings.id = requests.building_id
62 | LEFT JOIN panoramas ON requests.id = panoramas.request_id
63 | ),
64 | '[]'
65 | ),
66 | 'links',
67 | COALESCE(
68 | (
69 | SELECT
70 | json_agg(
71 | json_build_object(
72 | 'id', links.id,
73 | 'status', links.status,
74 | 'devices', (
75 | SELECT
76 | json_agg(
77 | json_build_object(
78 | 'id', devices.id,
79 | 'type', device_types,
80 | 'lat', devices.lat,
81 | 'lng', devices.lng,
82 | 'node_id', devices.node_id
83 | )
84 | )
85 | FROM
86 | devices
87 | LEFT JOIN device_types ON device_types.id = devices.device_type_id
88 | WHERE devices.id IN(links.device_a_id, links.device_b_id)
89 | )
90 | )
91 | )
92 | FROM
93 | links
94 | ),
95 | '[]'
96 | ),
97 | 'appointments',
98 | COALESCE(
99 | (
100 | SELECT
101 | json_agg(
102 | json_build_object(
103 | 'id', appointments.id,
104 | 'type', appointments.type,
105 | 'lat', buildings.lat,
106 | 'lng', buildings.lng
107 | )
108 | )
109 | FROM appointments
110 | JOIN buildings ON buildings.id = appointments.building_id
111 | WHERE
112 | appointments.date > now() - INTERVAL '6 HOURS'
113 | ),
114 | '[]'
115 | )
116 | ) as map`);
117 | return results[0].map;
118 | }
119 |
120 | export async function getMap() {
121 | const results = await performQuery(`SELECT
122 | json_build_object(
123 | 'nodes',
124 | COALESCE(
125 | (
126 | SELECT
127 | json_agg(
128 | json_build_object(
129 | 'id', nodes.id,
130 | 'lat', nodes.lat,
131 | 'lng', nodes.lng,
132 | 'status', nodes.status,
133 | 'name', nodes.name,
134 | 'notes', nodes.notes,
135 | 'devices', COALESCE((
136 | SELECT
137 | json_agg(
138 | json_build_object(
139 | 'id', devices.id,
140 | 'type', device_types,
141 | 'lat', devices.lat,
142 | 'lng', devices.lng,
143 | 'azimuth', devices.azimuth,
144 | 'status', devices.status
145 | )
146 | )
147 | FROM
148 | devices
149 | LEFT JOIN device_types ON device_types.id = devices.device_type_id
150 | WHERE devices.node_id = nodes.id
151 | ),
152 | '[]'
153 | )
154 | )
155 | )
156 | FROM
157 | nodes
158 | ),
159 | '[]'
160 | ),
161 | 'requests',
162 | COALESCE(
163 | (
164 | SELECT
165 | json_agg(
166 | DISTINCT jsonb_build_object(
167 | 'id', requests.id,
168 | 'lat', buildings.lat,
169 | 'lng', buildings.lng,
170 | 'bin', buildings.bin,
171 | 'status', requests.status,
172 | 'roof_access', requests.roof_access,
173 | 'has_panoramas', panoramas IS NOT NULL
174 | )
175 | )
176 | FROM
177 | requests
178 | JOIN buildings ON buildings.id = requests.building_id
179 | LEFT JOIN panoramas ON requests.id = panoramas.request_id
180 | ),
181 | '[]'
182 | ),
183 | 'links',
184 | COALESCE(
185 | (
186 | SELECT
187 | json_agg(
188 | json_build_object(
189 | 'id', links.id,
190 | 'status', links.status,
191 | 'devices', (
192 | SELECT
193 | json_agg(
194 | json_build_object(
195 | 'id', devices.id,
196 | 'type', device_types,
197 | 'lat', devices.lat,
198 | 'lng', devices.lng,
199 | 'node_id', devices.node_id
200 | )
201 | )
202 | FROM
203 | devices
204 | LEFT JOIN device_types ON device_types.id = devices.device_type_id
205 | WHERE devices.id IN(links.device_a_id, links.device_b_id)
206 | )
207 | )
208 | )
209 | FROM
210 | links
211 | ),
212 | '[]'
213 | )
214 | ) as map`);
215 | return results[0].map;
216 | }
217 |
--------------------------------------------------------------------------------
/src/db/members.js:
--------------------------------------------------------------------------------
1 | import { performQuery } from ".";
2 |
3 | export async function getMembers() {
4 | return performQuery(`SELECT
5 | members.*,
6 | COALESCE(JSON_AGG(DISTINCT nodes.*) FILTER (WHERE nodes.id IS NOT NULL), '[]') AS nodes
7 | FROM
8 | members
9 | LEFT JOIN memberships ON memberships.member_id = members.id
10 | LEFT JOIN nodes ON nodes.id = memberships.node_id
11 | GROUP BY
12 | members.id
13 | ORDER BY
14 | members.id DESC`);
15 | }
16 |
17 | export async function getMember(id) {
18 | if (!Number.isInteger(parseInt(id, 10))) throw new Error("Bad params");
19 |
20 | const [member] = await performQuery(
21 | `SELECT
22 | *
23 | FROM
24 | members
25 | WHERE
26 | members.id = $1`,
27 | [id]
28 | );
29 | if (!member) throw new Error("Not found");
30 |
31 | const nodes = await performQuery(
32 | `SELECT
33 | nodes.*,
34 | to_json(buildings) AS building,
35 | json_agg(
36 | json_build_object(
37 | 'id', devices.id,
38 | 'type', device_types,
39 | 'lat', devices.lat,
40 | 'lng', devices.lng,
41 | 'azimuth', devices.azimuth,
42 | 'status', devices.status
43 | )
44 | ) AS devices
45 | FROM
46 | nodes
47 | JOIN buildings ON nodes.building_id = buildings.id
48 | JOIN memberships ON memberships.node_id = nodes.id
49 | LEFT JOIN devices ON devices.node_id = nodes.id
50 | LEFT JOIN device_types ON device_types.id = devices.device_type_id
51 | WHERE
52 | memberships.member_id = $1
53 | GROUP BY
54 | nodes.id,
55 | buildings.id`,
56 | [id]
57 | );
58 |
59 | const requests = await performQuery(
60 | `SELECT
61 | requests.*,
62 | to_json(buildings) AS building,
63 | to_json(members) AS member
64 | FROM
65 | requests
66 | JOIN buildings ON requests.building_id = buildings.id
67 | JOIN members ON members.id = requests.member_id
68 | WHERE
69 | member_id = $1
70 | GROUP BY
71 | requests.id,
72 | buildings.id,
73 | members.id`,
74 | [id]
75 | );
76 |
77 | return {
78 | ...member,
79 | nodes,
80 | requests,
81 | };
82 | }
83 |
84 | export async function updateMember(id, patch) {
85 | const member = await getMember(id);
86 | const newMember = {
87 | ...member,
88 | ...patch,
89 | };
90 |
91 | return performQuery(
92 | `UPDATE
93 | members
94 | SET
95 | name = $2,
96 | email = $3,
97 | phone = $4,
98 | stripe_customer_id = $5
99 | WHERE
100 | id = $1
101 | RETURNING
102 | *`,
103 | [
104 | id,
105 | 			newMember.name,
106 | 			newMember.email,
107 | 			newMember.phone,
108 | 			newMember.stripe_customer_id,
109 | ]
110 | );
111 | }
112 |
--------------------------------------------------------------------------------
/src/db/memberships.js:
--------------------------------------------------------------------------------
1 | import { performQuery } from ".";
2 |
3 | export async function createMembership(nodeId, params) {
4 | const results = await performQuery(
5 | `INSERT INTO memberships (node_id, member_id) VALUES($1, $2) RETURNING *`,
6 | [nodeId, params.member_id]
7 | );
8 |
9 | return results[0];
10 | }
11 |
12 | export async function destroyMembership(id) {
13 | const results = await performQuery(
14 | `DELETE FROM memberships WHERE id = $1 RETURNING *`,
15 | [id]
16 | );
17 |
18 | return results[0];
19 | }
20 |
21 | export async function findMembership(nodeId, memberId) {
22 | const results = await performQuery(
23 | `SELECT * FROM memberships WHERE node_id = $1 AND member_id = $2`,
24 | [nodeId, memberId]
25 | );
26 |
27 | return results[0];
28 | }
29 |
--------------------------------------------------------------------------------
/src/db/nodes.js:
--------------------------------------------------------------------------------
1 | import { performQuery } from ".";
2 |
3 | export async function getNode(id) {
4 | if (!Number.isInteger(parseInt(id, 10))) throw new Error("Bad params");
5 | const [node] = await performQuery(
6 | `SELECT
7 | nodes.id,
8 | nodes.lat,
9 | nodes.lng,
10 | nodes.alt,
11 | nodes.status,
12 | nodes.name,
13 | nodes.notes,
14 | json_build_object('id', buildings.id, 'lat', buildings.lat, 'lng', buildings.lng, 'alt', buildings.alt, 'bin', buildings.bin, 'notes', buildings.notes) AS building,
15 | COALESCE(json_agg(panoramas ORDER BY panoramas.date DESC) FILTER (WHERE panoramas IS NOT NULL), '[]') AS panoramas,
16 | (${nodeDevicesQuery()}) AS devices,
17 | (${connectedNodesQuery()}) AS connected_nodes
18 | FROM
19 | nodes
20 | LEFT JOIN buildings ON nodes.building_id = buildings.id
21 | LEFT JOIN requests ON requests.building_id = buildings.id
22 | LEFT JOIN panoramas ON panoramas.request_id = requests.id
23 | WHERE
24 | nodes.id = $1
25 | GROUP BY
26 | nodes.id,
27 | buildings.id`,
28 | [id]
29 | );
30 | if (!node) throw Error("Not found");
31 | return node;
32 | }
33 |
34 | export async function authorizedGetNode(id) {
35 | if (!Number.isInteger(parseInt(id, 10))) throw new Error("Bad params");
36 | const [node] = await performQuery(
37 | `SELECT
38 | nodes.*,
39 | to_json(buildings) AS building,
40 | (${nodeMembersQuery()}) AS members,
41 | json_agg(DISTINCT requests) AS requests,
42 | COALESCE(json_agg(panoramas ORDER BY panoramas.date DESC) FILTER (WHERE panoramas IS NOT NULL), '[]') AS panoramas,
43 | (${nodeDevicesQuery()}) AS devices,
44 | (${connectedNodesQuery()}) AS connected_nodes
45 | FROM
46 | nodes
47 | LEFT JOIN buildings ON buildings.id = nodes.building_id
48 | LEFT JOIN requests ON requests.building_id = buildings.id
49 | LEFT JOIN panoramas ON panoramas.request_id = requests.id
50 | WHERE
51 | nodes.id = $1
52 | GROUP BY
53 | nodes.id,
54 | buildings.id`,
55 | [id]
56 | );
57 | if (!node) throw Error("Not found");
58 | return node;
59 | }
60 |
61 | export async function getNodes() {
62 | return performQuery(`SELECT
63 | nodes.id,
64 | nodes.lat,
65 | nodes.lng,
66 | nodes.alt,
67 | nodes.status,
68 | nodes.name,
69 | nodes.notes,
70 | buildings.address AS building,
71 | (${nodeDevicesQuery()}) AS devices
72 | FROM
73 | nodes
74 | LEFT JOIN buildings ON nodes.building_id = buildings.id
75 | LEFT JOIN devices ON nodes.id = devices.node_id
76 | LEFT JOIN device_types ON device_types.id IN (devices.device_type_id)
77 | GROUP BY
78 | nodes.id,
79 | buildings.id
80 | ORDER BY
81 | nodes.create_date DESC`);
82 | }
83 |
84 | export async function authorizedGetNodes() {
85 | return performQuery(`SELECT
86 | nodes.*,
87 | buildings.address AS building,
88 | (${nodeDevicesQuery()}) AS devices
89 | FROM
90 | nodes
91 | LEFT JOIN buildings ON nodes.building_id = buildings.id
92 | LEFT JOIN devices ON nodes.id = devices.node_id
93 | LEFT JOIN device_types ON device_types.id IN (devices.device_type_id)
94 | GROUP BY
95 | nodes.id,
96 | buildings.id
97 | ORDER BY
98 | nodes.create_date DESC`);
99 | }
100 |
101 | export async function createNode(node) {
102 | const { lat, lng, alt, status, name, notes, building_id } = node;
103 | const randomId = await unusedNodeId();
104 | const now = new Date();
105 | return performQuery(
106 | `INSERT INTO nodes (id, lat, lng, alt, status, name, notes, create_date, building_id)
107 | VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9)
108 | RETURNING
109 | *`,
110 | [randomId, lat, lng, alt, status, name, notes, now, building_id]
111 | );
112 |
113 | // Super hacky way to find unused node id
114 | // Keep trying random numbers between 100-8000
115 | async function unusedNodeId(tries = 10) {
116 | if (tries === 0) throw new Error("Unable to find unused node id");
117 | const randomId = 100 + Math.floor(Math.random() * 7900);
118 | const [
119 | existingNode,
120 | ] = await performQuery("SELECT * FROM nodes WHERE id = $1", [randomId]);
121 | if (existingNode) return unusedNodeId(tries - 1);
122 | return randomId;
123 | }
124 | }
125 |
126 | export async function updateNode(id, patch) {
127 | const existingNode = await authorizedGetNode(id);
128 |
129 | 	// TODO: Sanitize / validate new values!!
130 |
131 | const newNode = {
132 | ...existingNode,
133 | ...patch,
134 | };
135 | await performQuery(
136 | `UPDATE nodes SET status = $2, lat = $3, lng = $4, alt = $5, name = $6, notes = $7, building_id = $8
137 | WHERE id = $1
138 | RETURNING
139 | *`,
140 | [
141 | id,
142 | newNode.status,
143 | newNode.lat,
144 | newNode.lng,
145 | newNode.alt,
146 | newNode.name,
147 | newNode.notes,
148 | newNode.building_id,
149 | ]
150 | );
151 |
152 | const updatedNode = await getNode(id);
153 | return updatedNode;
154 | }
155 |
156 | function nodeDevicesQuery() {
157 | return `SELECT
158 | COALESCE(json_agg(
159 | json_build_object(
160 | 'id', devices.id,
161 | 'type', device_types,
162 | 'lat', devices.lat,
163 | 'lng', devices.lng,
164 | 'alt', devices.alt,
165 | 'azimuth', devices.azimuth,
166 | 'status', devices.status,
167 | 'name', devices.name,
168 | 'ssid', devices.ssid,
169 | 'notes', devices.notes,
170 | 'create_date', devices.create_date,
171 | 'abandon_date', devices.abandon_date
172 | )
173 | ), '[]') AS devices
174 | FROM
175 | devices
176 | LEFT JOIN device_types ON device_types.id IN(devices.device_type_id)
177 | WHERE
178 | devices.node_id = nodes.id`;
179 | }
180 |
181 | function nodeMembersQuery() {
182 | return `SELECT
183 | json_agg(json_build_object('id', members.id, 'name', members.name, 'email', members.email, 'phone', members.phone, 'donor', members.donor, 'membership_id', memberships.id))
184 | FROM
185 | members
186 | LEFT JOIN memberships ON memberships.member_id = members.id
187 | WHERE
188 | memberships.node_id = $1`;
189 | }
190 |
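// Nodes reachable from node $1 over an active link, matched through either
// end's device.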
191 | function connectedNodesQuery() {
192 | return `COALESCE(
193 | (SELECT
194 | json_agg(json_build_object(
195 | 'id', nodes.id,
196 | 'lat', nodes.lat,
197 | 'lng', nodes.lng,
198 | 'alt', nodes.alt,
199 | 'status', nodes.status,
200 | 'name', nodes.name,
201 | 'notes', nodes.notes
202 | ))
203 | FROM
204 | devices devices1
205 | JOIN links ON links.device_a_id = devices1.id
206 | OR links.device_b_id = devices1.id
207 | JOIN devices devices2 ON devices2.id = links.device_b_id
208 | OR devices2.id = links.device_a_id
209 | JOIN nodes nodes ON nodes.id = devices2.node_id
210 | WHERE
211 | devices1.node_id = $1
212 | AND nodes.id != $1
213 | AND links.status = 'active'),
214 | '[]'
215 | )`;
216 | }
217 |
--------------------------------------------------------------------------------
/src/db/panos.js:
--------------------------------------------------------------------------------
1 | import AWS from "aws-sdk";
2 | import { panoMessage } from "../slack";
3 | import { getRequest } from "./requests";
4 | import { performQuery } from ".";
5 |
6 | const s3 = new AWS.S3({
7 | endpoint: process.env.S3_ENDPOINT,
8 | accessKeyId: process.env.S3_ID,
9 | secretAccessKey: process.env.S3_KEY,
10 | });
11 |
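// Returns a presigned S3 PUT URL so the client can upload the panorama image
// directly to the bucket instead of streaming it through this API.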
12 | export async function getUploadURL(name, type) {
13 | if (!name || !type) throw new Error("Bad params");
14 |
15 | // const timestamp =
16 |
17 | // TODO: Validate content type?
18 |
19 | const url = await s3.getSignedUrl("putObject", {
20 | Bucket: process.env.S3_BUCKET,
21 | Key: name,
22 | ContentType: type,
23 | ACL: "public-read",
24 | });
25 |
26 | return url;
27 | }
28 |
29 | export async function createPano({ url, request_id }, slackClient) {
30 | if (!url) throw new Error("Bad params");
31 |
32 | const [
33 | pano,
34 | ] = await performQuery(
35 | "INSERT INTO panoramas (url, date, request_id) VALUES ($1, $2, $3) RETURNING *",
36 | [url, new Date(), request_id]
37 | );
38 |
39 | try {
40 | const request = await getRequest(request_id);
41 | await panoMessage(slackClient, pano, request);
42 | } catch (error) {
43 | console.log("Failed to send pano slack message!");
44 | console.log(error.message);
45 | }
46 |
47 | return pano;
48 | }
49 |
--------------------------------------------------------------------------------
/src/db/requests.js:
--------------------------------------------------------------------------------
1 | import crypto from "crypto";
2 | import fetch from "node-fetch";
3 | import { getLos, getBuildingHeightMeters } from "./los";
4 | import { requestMessage } from "../slack";
5 | import { performQuery } from ".";
6 |
7 | export async function getRequest(id) {
8 | if (!Number.isInteger(parseInt(id, 10))) throw new Error("Bad params");
9 | const [request] = await performQuery(
10 | `SELECT
11 | requests.*,
12 | to_json(buildings) AS building,
13 | to_json(members) AS member,
14 | COALESCE(json_agg(DISTINCT panoramas) FILTER (WHERE panoramas IS NOT NULL), '[]') AS panoramas
15 | FROM
16 | requests
17 | JOIN buildings ON requests.building_id = buildings.id
18 | JOIN members ON requests.member_id = members.id
19 | LEFT JOIN panoramas ON requests.id = panoramas.request_id
20 | WHERE
21 | requests.id = $1
22 | GROUP BY
23 | requests.id,
24 | buildings.id,
25 | members.id`,
26 | [id]
27 | );
28 | if (!request) throw new Error("Not found");
29 | return request;
30 | }
31 |
32 | // Get request without api key using a secret token
33 | export async function getRequestFromToken(token) {
34 | if (!token) throw new Error("Bad params");
35 | 	const [request] = await performQuery(
36 | 		`SELECT
37 | 			requests.*,
38 | 			to_json(buildings) AS building,
39 | 			COALESCE(json_agg(DISTINCT panoramas) FILTER (WHERE panoramas IS NOT NULL), '[]') AS panoramas
40 | 		FROM
41 | 			requests
42 | 			JOIN request_tokens ON request_tokens.request_id = requests.id
43 | 			JOIN buildings ON requests.building_id = buildings.id
44 | 			LEFT JOIN panoramas ON requests.id = panoramas.request_id
45 | 		WHERE
46 | 			request_tokens.token = $1
47 | 		GROUP BY
48 | 			requests.id, buildings.id`,
49 | 		[token]
50 | );
51 | if (!request) throw new Error("Not found");
52 | return request;
53 | }
54 |
55 | export async function getRequests() {
56 | return performQuery(`SELECT
57 | requests.*,
58 | to_json(buildings) AS building,
59 | to_json(members) AS member
60 | FROM
61 | requests
62 | LEFT JOIN buildings ON requests.building_id = buildings.id
63 | LEFT JOIN members ON requests.member_id = members.id
64 | GROUP BY
65 | requests.id,
66 | buildings.id,
67 | members.id
68 | ORDER BY
69 | date DESC`);
70 | }
71 |
72 | export async function createRequest(request, slackClient) {
73 | const {
74 | name,
75 | email,
76 | phone,
77 | address,
78 | apartment,
79 | roof_access,
80 | roofAccess,
81 | spreadsheetId,
82 | } = request;
83 |
84 | const isInvalid = !name || !email || !phone || !address || !apartment;
85 | if (!spreadsheetId && isInvalid) {
86 | throw new Error("Invalid request");
87 | }
88 |
89 | // Geocode address
90 | let { lat, lng, bin } = request;
91 | try {
92 | const googleData = await getGoogleData(address);
93 | const nycData = await getNycData(address, lat, lng);
94 | lat = googleData.geometry.location.lat || nycData.lat;
95 | lng = googleData.geometry.location.lng || nycData.lng;
96 | bin = nycData.bin;
97 | } catch (error) {
98 | console.log(error);
99 | }
100 |
101 | const alt = await getBuildingHeightMeters(bin);
102 |
103 | // Look up building by bin
104 | let building;
105 | try {
106 | [building] = await performQuery("SELECT * FROM buildings WHERE bin = $1", [
107 | request.bin,
108 | ]);
109 | } catch (error) {
110 | console.log(error);
111 | }
112 |
113 | // Look up building by address
114 | if (!building) {
115 | [
116 | building,
117 | ] = await performQuery("SELECT * FROM buildings WHERE address = $1", [
118 | address,
119 | ]);
120 | }
121 |
122 | // Create building if new
123 | if (!building) {
124 | [
125 | building,
126 | ] = await performQuery(
127 | "INSERT INTO buildings (address, lat, lng, alt, bin) VALUES ($1, $2, $3, $4, $5) RETURNING *",
128 | [address, lat, lng, alt, bin]
129 | );
130 | }
131 |
132 | // Look up member by email
133 | let [member] = await performQuery("SELECT * FROM members WHERE email = $1", [
134 | email,
135 | ]);
136 |
137 | // Create member if new
138 | if (!member) {
139 | [
140 | member,
141 | ] = await performQuery(
142 | "INSERT INTO members (name, email, phone) VALUES ($1, $2, $3) RETURNING *",
143 | [name, email, phone]
144 | );
145 | }
146 |
147 | // Insert request
148 | const now = new Date();
149 | let [
150 | dbRequest,
151 | ] = await performQuery(
152 | "INSERT INTO requests (date, apartment, roof_access, member_id, building_id) VALUES ($1, $2, $3, $4, $5) RETURNING *",
153 | [now, apartment, roof_access || roofAccess, member.id, building.id]
154 | );
155 |
156 | // Create token
157 | const buffer = await crypto.randomBytes(8);
158 | const token = buffer.toString("hex");
159 | await performQuery(
160 | "INSERT INTO request_tokens (token, request_id) VALUES ($1, $2) RETURNING *",
161 | [token, dbRequest.id]
162 | );
163 |
164 | // Get los
165 | let visibleNodes = [];
166 | try {
167 | const { visibleSectors, visibleOmnis } = await getLos(bin);
168 | visibleNodes.push(...visibleSectors, ...visibleOmnis);
169 | } catch (error) {
170 | console.log("Failed to get line of sight");
171 | console.log(error);
172 | }
173 |
174 | // Send Slack message and save timestamp to db
175 | try {
176 | const slackRequest = {
177 | ...dbRequest,
178 | id: spreadsheetId || dbRequest.id,
179 | };
180 | const slack_ts = await sendSlackMessage({
181 | request: slackRequest,
182 | building,
183 | visibleNodes,
184 | slackClient,
185 | });
186 | await performQuery(
187 | "UPDATE requests SET slack_ts = $1 WHERE id = $2 RETURNING *",
188 | [slack_ts, dbRequest.id]
189 | );
190 | } catch (error) {
191 | console.log(error);
192 | }
193 |
194 | dbRequest = await getRequest(dbRequest.id);
195 |
196 | return {
197 | ...dbRequest,
198 | token,
199 | };
200 | }
201 |
202 | async function sendSlackMessage({
203 | request,
204 | building,
205 | visibleNodes,
206 | slackClient,
207 | }) {
208 | const buildingNodes = await performQuery(
209 | "SELECT * FROM nodes WHERE nodes.building_id = $1 AND nodes.status = 'active'",
210 | [request.building_id]
211 | );
212 | const { ts } = await requestMessage(
213 | slackClient,
214 | request,
215 | building,
216 | visibleNodes,
217 | buildingNodes
218 | );
219 | return ts;
220 | }
221 |
222 | export async function updateRequest(id, patch) {
223 | const existingRequest = await getRequest(id, true);
224 |
225 | 	// TODO: Sanitize / validate new values!!
226 |
227 | const newRequest = {
228 | ...existingRequest,
229 | ...patch,
230 | };
231 |
232 | await performQuery(
233 | `UPDATE
234 | requests
235 | SET
236 | status = $2,
237 | apartment = $3,
238 | roof_access = $4
239 | WHERE
240 | id = $1
241 | RETURNING
242 | *`,
243 | [id, newRequest.status, newRequest.apartment, newRequest.roof_access]
244 | );
245 |
246 | const updatedRequest = await getRequest(id);
247 | return updatedRequest;
248 | }
249 |
250 | // https://docs.osticket.com/en/latest/Developer%20Documentation/API/Tickets.html
251 | async function createTicket(request, building, member) {
252 | const { id, date } = request;
253 | const { address, lat, lng } = building;
254 | const { name, email, phone } = member;
255 |
256 | const subject = `NYC Mesh Install`;
257 | const message = address;
258 |
259 | const url = "http://devsupport.nycmesh.net/api/http.php/tickets.json";
260 | const response = await fetch(url, {
261 | method: "POST",
262 | headers: {
263 | "Content-Type": "application/json",
264 | "X-API-Key": process.env.OSTICKET_API_KEY,
265 | },
266 | body: JSON.stringify({
267 | email,
268 | name,
269 | subject,
270 | message,
271 | phone,
272 | }),
273 | });
274 |
275 | const text = await response.text();
276 | if (response.status !== 201) {
277 | throw new Error(text);
278 | }
279 |
280 | return text; // external ticket id of the newly-created ticket
281 | }
282 |
283 | async function getGoogleData(address) {
284 | const encodedAddress = encodeURIComponent(address);
285 | const params = `address=${encodedAddress}&key=${process.env.GOOGLE_MAPS_API_KEY}`;
286 | const URL = `https://maps.googleapis.com/maps/api/geocode/json?${params}`;
287 | const res = await fetch(URL);
288 | const json = await res.json();
289 | return json.results[0];
290 | }
291 |
292 | async function getOsmData(address) {
293 | const encodedAddress = encodeURIComponent(address);
294 | const URL = `https://nominatim.openstreetmap.org/search?q=${encodedAddress}&format=json`;
295 | const res = await fetch(URL);
296 | const json = await res.json();
297 | return json[0];
298 | }
299 |
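// Resolve an address to its NYC BIN (building identification number) via the
// NYC Planning Labs GeoSearch API; when several candidates come back, prefer
// the one closest to the lat/lng already attached to the request.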
300 | async function getNycData(address, buildingLat = 0, buildingLng = 0) {
301 | const URIaddress = encodeURIComponent(address);
302 | const URL = `https://geosearch.planninglabs.nyc/v1/search?text=${URIaddress}`;
303 | const binRes = await fetch(URL);
304 | const { features } = await binRes.json();
305 |
306 | if (!features.length) {
307 | return {};
308 | }
309 |
310 | const [feature] = features.sort(sortByDistance);
311 | const { properties, geometry } = feature;
312 | const bin = properties.pad_bin;
313 | const [lng, lat] = geometry.coordinates;
314 |
315 | return {
316 | lat,
317 | lng,
318 | bin,
319 | };
320 |
321 | function sortByDistance(a, b) {
322 | const buildingLatLng = [buildingLng, buildingLat];
323 | const distanceA = distance(a.geometry.coordinates, buildingLatLng);
324 | const distanceB = distance(b.geometry.coordinates, buildingLatLng);
325 | return distanceA - distanceB;
326 | }
327 |
328 | function distance(a, b) {
329 | const xDiff = a[0] - b[0];
330 | const yDiff = a[1] - b[1];
331 | return Math.sqrt(xDiff * xDiff + yDiff * yDiff);
332 | }
333 | }
334 |
--------------------------------------------------------------------------------
/src/db/search.js:
--------------------------------------------------------------------------------
1 | import { performQuery } from ".";
2 |
3 | export async function getSearch(query, authorized) {
4 | const results = {};
5 |
6 | if (authorized) {
7 | results.nodes = await authorizedSearchNodes(query);
8 | results.buildings = await authorizedSearchBuildings(query);
9 | results.requests = await authorizedSearchRequests(query);
10 | results.members = await authorizedSearchMembers(query);
11 | } else {
12 | results.nodes = await searchNodes(query);
13 | results.buildings = await searchBuildings(query);
14 | }
15 |
16 | return results;
17 | }
18 |
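// The node and request searches below share a parameter pattern: $1 is an
// exact match (e.g. a numeric id), $2 a prefix match, and $3 a substring match.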
19 | function authorizedSearchNodes(query) {
20 | return performQuery(
21 | `SELECT
22 | nodes.*
23 | FROM
24 | nodes
25 | JOIN memberships ON memberships.node_id = nodes.id
26 | JOIN members ON members.id = memberships.member_id
27 | WHERE
28 | CAST(nodes.id AS VARCHAR) = $1
29 | OR nodes.name ILIKE $2
30 | OR nodes.notes ILIKE $3
31 | OR members.name ILIKE $2
32 | OR members.email ILIKE $2
33 | GROUP BY
34 | nodes.id
35 | LIMIT 5`,
36 | [query, `${query}%`, `%${query}%`]
37 | );
38 | }
39 |
40 | function searchNodes(query) {
41 | return performQuery(
42 | `SELECT
43 | nodes.id,
44 | nodes.lat,
45 | nodes.lng,
46 | nodes.alt,
47 | nodes.status,
48 | nodes.name,
49 | nodes.notes,
50 | nodes.building_id
51 | FROM
52 | nodes
53 | WHERE
54 | CAST(nodes.id AS VARCHAR) = $1
55 | OR nodes.name ILIKE $2
56 | OR nodes.notes ILIKE $3
57 | LIMIT 5`,
58 | [query, `${query}%`, `%${query}%`]
59 | );
60 | }
61 |
62 | function authorizedSearchBuildings(query) {
63 | return performQuery(
64 | `SELECT
65 | *
66 | FROM
67 | buildings
68 | WHERE address ILIKE $1
69 | OR notes ILIKE $2
70 | LIMIT 5`,
71 | [`${query}%`, `%${query}%`]
72 | );
73 | }
74 |
75 | function searchBuildings(query) {
76 | return performQuery(
77 | `SELECT
78 | id,
79 | lat,
80 | lng,
81 | alt,
82 | bin,
83 | notes
84 | FROM
85 | buildings
86 | WHERE address ILIKE $1
87 | OR notes ILIKE $2
88 | LIMIT 5`,
89 | [`${query}%`, `%${query}%`]
90 | );
91 | }
92 |
93 | function authorizedSearchRequests(query) {
94 | return performQuery(
95 | `SELECT
96 | requests.*,
97 | to_json(buildings) AS building,
98 | to_json(members) AS member
99 | FROM
100 | requests
101 | JOIN buildings ON requests.building_id = buildings.id
102 | JOIN members ON requests.member_id = members.id
103 | WHERE CAST(requests.id AS VARCHAR) = $1
104 | OR buildings.address ILIKE $3
105 | OR members.name ILIKE $2
106 | OR members.email ILIKE $2
107 | OR notes ILIKE $3
108 | GROUP BY
109 | requests.id,
110 | buildings.id,
111 | members.id
112 | LIMIT 5`,
113 | [query, `${query}%`, `%${query}%`]
114 | );
115 | }
116 |
117 | export async function authorizedSearchMembers(query) {
118 | return performQuery(
119 | `SELECT *
120 | FROM
121 | members
122 | WHERE name ILIKE $1
123 | OR name ILIKE $2
124 | OR email ILIKE $3
125 | LIMIT 5`,
126 | 		[`${query}%`, `% ${query}%`, `${query}%`]
127 | );
128 | }
129 |
130 | export async function authorizedSearchDeviceTypes(query) {
131 | return performQuery(
132 | `SELECT *
133 | FROM
134 | device_types
135 | WHERE name ILIKE $1
136 | OR manufacturer ILIKE $1
137 | LIMIT 5`,
138 | [`${query}%`]
139 | );
140 | }
141 |
--------------------------------------------------------------------------------
/src/geojson/links.js:
--------------------------------------------------------------------------------
1 | import { performQuery } from "../db";
2 |
3 | export async function getLinksGeoJSON() {
4 | const links = await getLinks();
5 |
6 | return {
7 | type: "FeatureCollection",
8 | features: links.map(linkFeature),
9 | };
10 | }
11 |
12 | function linkFeature(link) {
13 | const { device_a_lat, device_a_lng, device_b_lat, device_b_lng } = link;
14 | return {
15 | type: "Feature",
16 | properties: {
17 | id: link.id,
18 | },
19 | geometry: {
20 | type: "LineString",
21 | coordinates: [
22 | [device_a_lng, device_a_lat],
23 | [device_b_lng, device_b_lat],
24 | ],
25 | },
26 | };
27 | }
28 |
29 | const getLinksQuery = `SELECT
30 | links.id,
31 | device_a.lat AS device_a_lat,
32 | device_a.lng AS device_a_lng,
33 | device_b.lat AS device_b_lat,
34 | device_b.lng AS device_b_lng
35 | FROM
36 | links
37 | JOIN devices device_a ON device_a.id = links.device_a_id
38 | JOIN devices device_b ON device_b.id = links.device_b_id
39 | WHERE
40 | links.status = 'active'
41 | GROUP BY
42 | links.id,
43 | device_a.id,
44 | device_b.id`;
45 |
46 | async function getLinks() {
47 | return performQuery(getLinksQuery);
48 | }
49 |
--------------------------------------------------------------------------------
/src/geojson/nodes.js:
--------------------------------------------------------------------------------
1 | import { performQuery } from "../db";
2 |
3 | export async function getNodesGeoJSON() {
4 | const nodes = await getNodes();
5 |
6 | return {
7 | type: "FeatureCollection",
8 | features: nodes.map(nodeFeature),
9 | };
10 | }
11 |
12 | function nodeFeature(node) {
13 | return {
14 | type: "Feature",
15 | properties: {
16 | id: node.id,
17 | },
18 | geometry: {
19 | type: "Point",
20 | coordinates: [node.lng, node.lat],
21 | },
22 | };
23 | }
24 |
25 | const getNodesQuery = `SELECT
26 | nodes.id,
27 | nodes.lat,
28 | nodes.lng
29 | FROM
30 | nodes
31 | WHERE
32 | nodes.status = 'active'
33 | ORDER BY
34 | nodes.id`;
35 |
36 | async function getNodes() {
37 | return performQuery(getNodesQuery);
38 | }
39 |
--------------------------------------------------------------------------------
/src/kml/appointments.js:
--------------------------------------------------------------------------------
1 | import { format } from "date-fns";
2 | import { performQuery } from "../db";
3 | import { iconStyle, data, kml } from "./utils";
4 |
5 | export async function getAppointmentsKML(params) {
6 | const appointments = await getAppointments();
7 | const appointmentsKML = appointments.map(appointmentKML);
8 |
9 | const elements = [
10 | iconStyle("install", 0.75, "https://i.imgur.com/4baif2L.png"),
11 | iconStyle("support", 0.75, "https://i.imgur.com/qVRzBlS.png"),
12 | iconStyle("survey", 0.75, "https://i.imgur.com/4baif2L.png"),
13 | appointmentsKML,
14 | ];
15 |
16 | return kml(elements);
17 | }
18 |
19 | function appointmentKML(appointment) {
20 | const capitalizedType = `${appointment.type
21 | .charAt(0)
22 | .toUpperCase()}${appointment.type.slice(1)}`;
23 | const dateString = format(appointment.date, "eee, MMM d");
24 | const hourString = format(appointment.date, "h:mm a", {
25 | timeZone: "America/New_York",
26 | });
27 | const coordinates = `${appointment.lng},${appointment.lat},${appointment.alt}`;
28 | const ticketURL = `Ticket →`;
29 | 	return `<Placemark>
30 | 		<name>${capitalizedType} - ${dateString}</name>
31 | 		<ExtendedData>
32 | 			${data("Type", capitalizedType)}
33 | 			${data("Date", dateString)}
34 | 			${data("Hour", hourString)}
35 | 			${data("Links", ticketURL)}
36 | 		</ExtendedData>
37 | 		<Point>
38 | 			<altitudeMode>absolute</altitudeMode>
39 | 			<coordinates>${coordinates}</coordinates>
40 | 		</Point>
41 | 		<styleUrl>#${appointment.type}</styleUrl>
42 | 	</Placemark>
43 | `;
44 | }
45 |
46 | const appointmentsQuery = `SELECT
47 | appointments.*,
48 | buildings.address,
49 | buildings.lat,
50 | buildings.lng,
51 | buildings.alt
52 | FROM
53 | appointments
54 | JOIN buildings ON buildings.id = appointments.building_id
55 | WHERE
56 | appointments.date > now() - INTERVAL '6 HOURS'
57 | ORDER BY
58 | date`;
59 |
60 | async function getAppointments() {
61 | return performQuery(appointmentsQuery);
62 | }
63 |
--------------------------------------------------------------------------------
/src/kml/index.js:
--------------------------------------------------------------------------------
1 | import { networkLink } from "./utils";
2 |
3 | export function getKML() {
4 | 	return `<?xml version="1.0" encoding="UTF-8"?>
5 | <kml xmlns="http://www.opengis.net/kml/2.2">
6 | 	<Document>
7 | 		<name>NYC Mesh API</name>
8 | 		${networkLink("Appointments", "/v1/kml/appointments")}
9 | 		${networkLink("LoS", "/v1/kml/los")}
10 | 		${networkLink("Nodes", "/v1/kml/nodes")}
11 | 		${networkLink("Requests", "/v1/kml/requests")}
12 | 	</Document>
13 | </kml>
14 | `;
15 | }
16 |
--------------------------------------------------------------------------------
/src/kml/los.js:
--------------------------------------------------------------------------------
1 | import { performQuery } from "../db";
2 | import { lineStyle, kml } from "./utils";
3 |
4 | export async function getLosKML(params) {
5 | const los = await getLos();
6 |
7 | const losByRequest = los.reduce((acc, cur) => {
8 | const [request] = cur.requests;
9 | acc[request.id] = acc[request.id] || [];
10 | acc[request.id].push(cur);
11 | return acc;
12 | }, {});
13 |
14 | const elements = [
15 | lineStyle("losLink", "9900ff00", 2.5),
16 | Object.entries(losByRequest).map(([requestId, requestLos]) => {
17 | const placemarks = requestLos.map(losPlacemark);
18 | 			return `<Folder><name>${requestId}</name>${placemarks.join("")}</Folder>`;
19 | }),
20 | ];
21 |
22 | return kml(elements);
23 | }
24 |
25 | function losPlacemark(los) {
26 | const { building_a_id, building_b_id, nodes, requests } = los;
27 | const { lat_a, lng_a, alt_a, lat_b, lng_b, alt_b } = los;
28 | let fromId = (los.requests[0] || {}).id;
29 | let toId = (los.nodes[0] || {}).id || "Potential";
30 | 	return `<Placemark>
31 | 		<name>Line of Sight</name>
32 | 		<LineString>
33 | 			<altitudeMode>absolute</altitudeMode>
34 | 			<coordinates>${lng_a},${lat_a},${alt_a} ${lng_b},${lat_b},${alt_b}</coordinates>
35 | 		</LineString>
36 | 		<styleUrl>#losLink</styleUrl>
37 | 	</Placemark>
38 | `;
39 | }
40 |
41 | const losQuery = `SELECT
42 | los.*,
43 | json_agg(requests) AS requests,
44 | json_agg(nodes) AS nodes
45 | FROM
46 | los
47 | JOIN requests ON requests.building_id = los.building_a_id
48 | AND requests.status = 'open'
49 | LEFT JOIN nodes ON nodes.building_id = los.building_b_id
50 | AND nodes.status = 'active'
51 | GROUP BY
52 | los.id,
53 | los.building_a_id`;
54 |
55 | const losPanoQuery = `SELECT
56 | los.*,
57 | json_agg(requests) AS requests,
58 | json_agg(nodes) AS nodes
59 | FROM
60 | los
61 | JOIN requests ON requests.building_id = los.building_a_id
62 | AND requests.status = 'open'
63 | JOIN panoramas ON panoramas.request_id = requests.id
64 | LEFT JOIN nodes ON nodes.building_id = los.building_b_id
65 | AND nodes.status = 'active'
66 | GROUP BY
67 | los.id,
68 | los.building_a_id`;
69 |
70 | async function getLos() {
71 | return performQuery(losQuery);
72 | }
73 |
74 | const losOfDegreeQuery = `SELECT
75 | los.*,
76 | json_agg(requests) AS requests,
77 | json_agg(nodes) AS nodes
78 | FROM
79 | los
80 | JOIN requests ON requests.building_id IN (los.building_a_id, los.building_b_id)
81 | AND requests.status = 'open'
82 | JOIN nodes ON nodes.building_id = los.building_b_id
83 | AND nodes.status = 'active'
84 | WHERE
85 | building_a_id IN(
86 | SELECT
87 | building_a_id FROM los
88 | GROUP BY
89 | building_a_id
90 | HAVING
91 | count(building_a_id) >= $1)
92 | GROUP BY
93 | los.id`;
94 |
95 | async function getLosOfDegree(degree) {
96 | return performQuery(losOfDegreeQuery, [degree]);
97 | }
98 |
--------------------------------------------------------------------------------
/src/kml/nodes.js:
--------------------------------------------------------------------------------
1 | import { performQuery } from "../db";
2 | import { iconStyle, lineStyle, data, panoData, kml } from "./utils";
3 |
4 | export async function getNodesKML() {
5 | const nodes = await getNodes();
6 | const links = await getLinks();
7 |
8 | const linksByNode = links.reduce((acc, cur) => {
9 | acc[cur.node_a.id] = acc[cur.node_a.id] || [];
10 | acc[cur.node_a.id].push(cur);
11 | return acc;
12 | }, {});
13 |
14 | const nodesKml = nodes
15 | .sort((a, b) => a.id - b.id)
16 | .map(
17 | 			(node) => `<Folder>
18 | 				<name>${node.id}</name>
19 | 				${nodePlacemark(node)}
20 | 				${(linksByNode[node.id] || []).map(linkPlacemark).join("")}
21 | 			</Folder>`
22 | );
23 |
24 | const elements = [
25 | iconStyle("supernode", 0.6, "https://i.imgur.com/GFd364p.png"),
26 | iconStyle("hub", 0.6, "https://i.imgur.com/dsizT9e.png"),
27 | iconStyle("omni", 0.6, "https://i.imgur.com/dsizT9e.png"),
28 | iconStyle("node", 0.5, "https://i.imgur.com/OBBZi9E.png"),
29 | lineStyle("hubLink", "ff00ffff", 3),
30 | lineStyle("backboneLink", "ff00ffff", 3),
31 | lineStyle("activeLink", "ff0000ff", 3),
32 | nodesKml,
33 | ];
34 |
35 | return kml(elements);
36 | }
37 |
38 | function nodePlacemark(node) {
39 | const dashboardLink = `Dashboard →`;
40 | const ticketLink = `Tickets →`;
41 | 	return `<Placemark>
42 | 		<name>${node.name || `Node ${node.id}`}</name>
43 | 		<ExtendedData>
44 | 			${node.name ? data("Name", node.name) : ""}
45 | 			${data("Status", node.status)}
46 | 			${data("Installed", node.create_date.toDateString())}
47 | 			${data("Devices", node.device_types.map((d) => d.name).join(", "))}
48 | 			${node.notes ? data("Notes", node.notes) : ""}
49 | 			${data("Links", `${dashboardLink} ${ticketLink}`)}
50 | 			${panoData(node.panoramas.filter((p) => p) || [])}
51 | 		</ExtendedData>
52 | 		<Point>
53 | 			<altitudeMode>absolute</altitudeMode>
54 | 			<coordinates>${node.lng},${node.lat},${node.alt || 20}</coordinates>
55 | 		</Point>
56 | 		<styleUrl>${nodeStyleId(node)}</styleUrl>
57 | 	</Placemark>`;
58 | }
59 |
60 | function linkPlacemark(link) {
61 | const { node_a, node_b, device_type_a, device_type_b } = link;
62 | const coordinates = `${node_a.lng},${node_a.lat},${node_a.alt} ${node_b.lng},${node_b.lat},${node_b.alt}`;
63 | const deviceNameA =
64 | device_type_a.name === "Unknown" ? "Unknown Device" : device_type_a.name;
65 | const deviceNameB =
66 | device_type_b.name === "Unknown" ? "Unknown Device" : device_type_b.name;
67 | 	return `<Placemark>
68 | 		<name>Link</name>
69 | 		<ExtendedData>
70 | 			${data("ID", link.id)}
71 | 			${data("Status", link.status)}
72 | 			${data("From", `${node_a.name || node_a.id} ${deviceNameA}`)}
73 | 			${data("To", `${node_b.name || node_b.id} ${deviceNameB}`)}
74 | 		</ExtendedData>
75 | 		<LineString>
76 | 			<altitudeMode>absolute</altitudeMode>
77 | 			<coordinates>${coordinates}</coordinates>
78 | 		</LineString>
79 | 		<styleUrl>${linkStyleId(link)}</styleUrl>
80 | 	</Placemark>
81 | `;
82 | }
83 |
84 | const isOmni = (device_type) => device_type.name === "Omni";
85 | const isSupernode = (node) => node.name && node.name.includes("Supernode");
86 | const isHub = (node) => node.notes && node.notes.includes("hub");
87 | const isBackbone = (node, device_type) =>
88 | isSupernode(node) || isHub(node) || isOmni(device_type);
89 |
90 | function nodeStyleId(node) {
91 | const { name, notes, device_types } = node;
92 | if (isSupernode(node)) return "#supernode";
93 | if (isHub(node)) return "#hub";
94 | if (device_types.filter(isOmni).length) return "#omni";
95 | return "#node";
96 | }
97 |
98 | // TODO: Need to check all devices on each node to determine color.
99 | function linkStyleId(link) {
100 | const { node_a, node_b, device_type_a, device_type_b } = link;
101 | if (isHub(node_a) && isHub(node_b)) return "#hubLink";
102 | if (isBackbone(node_a, device_type_a) && isBackbone(node_b, device_type_b))
103 | return "#backboneLink";
104 | return "#activeLink";
105 | }
106 |
107 | const getNodesQuery = `SELECT
108 | nodes.id,
109 | nodes.name,
110 | nodes.status,
111 | nodes.notes,
112 | nodes.create_date,
113 | buildings.lat,
114 | buildings.lng,
115 | buildings.alt,
116 | json_agg(DISTINCT devices) as devices,
117 | json_agg(DISTINCT device_types) as device_types,
118 | json_agg(DISTINCT panoramas) as panoramas
119 | FROM
120 | nodes
121 | LEFT JOIN buildings ON nodes.building_id = buildings.id
122 | LEFT JOIN devices ON nodes.id = devices.node_id
123 | LEFT JOIN device_types ON device_types.id IN (devices.device_type_id)
124 | LEFT JOIN requests ON requests.building_id = buildings.id
125 | LEFT JOIN panoramas ON panoramas.request_id = requests.id
126 | WHERE
127 | nodes.status = 'active'
128 | GROUP BY
129 | nodes.id,
130 | buildings.id
131 | ORDER BY
132 | nodes.create_date DESC`;
133 |
134 | async function getNodes() {
135 | return performQuery(getNodesQuery);
136 | }
137 |
138 | const getLinksQuery = `SELECT
139 | links.*,
140 | (
141 | SELECT
142 | to_json(devices.*)
143 | FROM
144 | devices
145 | WHERE
146 | devices.id = device_a_id) AS device_a,
147 | (
148 | SELECT
149 | to_json(devices.*)
150 | FROM
151 | devices
152 | WHERE
153 | devices.id = device_b_id) AS device_b,
154 | (
155 | SELECT
156 | to_json(device_types.*)
157 | FROM
158 | devices
159 | JOIN device_types ON device_types.id = devices.device_type_id
160 | WHERE
161 | devices.id = device_a_id) AS device_type_a,
162 | (
163 | SELECT
164 | to_json(device_types.*)
165 | FROM
166 | devices
167 | JOIN device_types ON device_types.id = devices.device_type_id
168 | WHERE
169 | devices.id = device_b_id) AS device_type_b,
170 | (
171 | SELECT
172 | to_json(nodes.*)
173 | FROM
174 | devices
175 | JOIN nodes ON nodes.id = devices.node_id
176 | WHERE
177 | devices.id = device_a_id) AS node_a,
178 | (
179 | SELECT
180 | to_json(nodes.*)
181 | FROM
182 | devices
183 | JOIN nodes ON nodes.id = devices.node_id
184 | WHERE
185 | devices.id = device_b_id) AS node_b
186 | FROM
187 | links
188 | JOIN devices ON devices.id IN (links.device_a_id, links.device_b_id)
189 | JOIN device_types ON device_types.id = devices.device_type_id
190 | JOIN nodes ON nodes.id = devices.node_id
191 | WHERE
192 | links.status = 'active'
193 | GROUP BY
194 | links.id`;
195 |
196 | async function getLinks() {
197 | return performQuery(getLinksQuery);
198 | }
199 |
--------------------------------------------------------------------------------
/src/kml/requests.js:
--------------------------------------------------------------------------------
1 | import { performQuery } from "../db";
2 | import { iconStyle, data, panoData, kml } from "./utils";
3 |
4 | export async function getRequestsKML(params) {
5 | const requests = await getRequests();
6 |
7 | const elements = [
8 | iconStyle("request", 0.3, "https://i.imgur.com/oVFMyJU.png"),
9 | iconStyle("panoRequest", 0.5, "https://i.imgur.com/uj6HMxZ.png"),
10 | requests.filter((r) => r.status === "open").map(requestPlacemark),
11 | ];
12 |
13 | return kml(elements);
14 | }
15 |
16 | function requestPlacemark(request) {
17 | const { id, building, panoramas } = request;
18 | const dashboardLink = `Dashboard →`;
19 | const ticketLink = `Tickets →`;
20 |   return `
21 |   <Placemark>
22 |     <name>${id}</name>
23 |     <ExtendedData>
24 |       ${data("ID", id)}
25 |       ${data("Date", request.date.toDateString())}
26 |       ${data("Roof", request.roof_access ? "Yes" : "No")}
27 |       ${data("Links", `${dashboardLink} ${ticketLink}`)}
28 |       ${panoData(panoramas || [])}
29 |     </ExtendedData>
30 |     <Point>
31 |       <altitudeMode>absolute</altitudeMode>
32 |       <coordinates>${building.lng},${building.lat},${building.alt}</coordinates>
33 |     </Point>
34 |     <styleUrl>${panoramas ? "#panoRequest" : "#request"}</styleUrl>
35 |   </Placemark>`;
36 | }
37 |
38 | const getRequestsQuery = `SELECT
39 | requests.id,
40 | requests.status,
41 | requests.date,
42 | requests.roof_access,
43 | (SELECT to_json(buildings.*) FROM buildings WHERE buildings.id = requests.building_id) AS building,
44 | (SELECT json_agg(panoramas.*) FROM panoramas WHERE panoramas.request_id = requests.id) AS panoramas
45 | FROM
46 | requests
47 | LEFT JOIN buildings ON requests.building_id = buildings.id
48 | WHERE
49 | requests.status = 'open'
50 | GROUP BY
51 | requests.id
52 | ORDER BY
53 | date DESC`;
54 |
55 | async function getRequests() {
56 | return performQuery(getRequestsQuery);
57 | }
58 |
--------------------------------------------------------------------------------
/src/kml/utils.js:
--------------------------------------------------------------------------------
1 | const BASE_URL =
2 | process.env.NODE_ENV === "production"
3 | ? "https://api.nycmesh.net"
4 | : "http://localhost:9000";
5 |
6 | export function networkLink(name, endpoint) {
7 |   return `<NetworkLink>
8 |     <name>${name}</name>
9 |     <Link>
10 |       <href>${BASE_URL}${endpoint}</href>
11 |     </Link>
12 |   </NetworkLink>`;
13 | }
14 |
15 | export function kml(document) {
16 |   return `<?xml version="1.0" encoding="UTF-8"?>
17 | <kml xmlns="http://www.opengis.net/kml/2.2">
18 | <Document>
19 | ${document}
20 | </Document>
21 | </kml>`;
22 | }
23 |
24 | export function data(name, value) {
25 |   return `<Data name="${name}">
26 |     <value>${value}</value>
27 |   </Data>`;
28 | }
29 |
30 | export function panoData(panoramas) {
31 | return data(
32 | "Panos",
33 | panoramas
34 | .map(
35 | (panorama) =>
36 |           `<img src="${panorama.url}" />`
37 |       )
38 | .join("")
39 | );
40 | }
41 |
42 | export function iconStyle(id, scale, icon) {
43 |   return `<Style id="${id}">
44 |     <IconStyle>
45 |       <scale>${scale}</scale>
46 |       <Icon>
47 |         <href>${icon}</href>
48 |       </Icon>
49 |     </IconStyle>
50 |   </Style>`;
51 | }
52 | 
53 | export function lineStyle(id, color, width) {
54 |   return `<Style id="${id}">
55 |     <LineStyle>
56 |       <color>${color}</color>
57 |       <width>${width}</width>
58 |     </LineStyle>
59 |   </Style>`;
60 | }
61 | 
--------------------------------------------------------------------------------
/src/routes/appointments.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import { getAppointment, getAppointments } from "../db/appointments";
4 | import { checkAuth } from "../auth";
5 |
6 | const router = Router({
7 | caseSensitive: true,
8 | });
9 |
10 | router.get("", async (req, res, next) => {
11 | await checkAuth(req.headers);
12 | const appointments = await getAppointments();
13 | res.json(appointments);
14 | });
15 |
16 | router.get("/:id", async (req, res, next) => {
17 | await checkAuth(req.headers);
18 | const appointment = await getAppointment(req.params.id);
19 | res.json(appointment);
20 | });
21 |
22 | export default router;
23 |
--------------------------------------------------------------------------------
/src/routes/buildings.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import { getBuildings, getBuilding, updateBuilding } from "../db/buildings";
4 | import { checkAuth } from "../auth";
5 |
6 | const router = Router({
7 | caseSensitive: true,
8 | });
9 |
10 | router.get("/", async (req, res) => {
11 | await checkAuth(req.headers);
12 | const buildings = await getBuildings();
13 | res.json(buildings);
14 | });
15 |
16 | router.get("/:id", async (req, res) => {
17 | await checkAuth(req.headers);
18 | const building = await getBuilding(req.params.id);
19 | res.json(building);
20 | });
21 |
22 | router.post("/:id", async (req, res) => {
23 | await checkAuth(req.headers);
24 | const building = await updateBuilding(req.params.id, req.body);
25 | res.json(building);
26 | });
27 |
28 | export default router;
29 |
--------------------------------------------------------------------------------
/src/routes/device_types.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import { authorizedSearchDeviceTypes } from "../db/search";
4 | import { checkAuth } from "../auth";
5 | 
6 | const router = Router({
7 |   caseSensitive: true,
8 | });
9 | 
10 | router.get("/search", async (req, res) => {
11 |   await checkAuth(req.headers);
12 |   const types = await authorizedSearchDeviceTypes(req.query.s);
13 |   res.json(types);
14 | });
15 | 
16 | export default router;
17 | 
--------------------------------------------------------------------------------
/src/routes/devices.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import {
4 | getDevices,
5 | getDevice,
6 | createDevice,
7 | updateDevice,
8 | deleteDevice,
9 | } from "../db/devices";
10 | import { checkAuth } from "../auth";
11 |
12 | const router = Router({
13 | caseSensitive: true,
14 | });
15 |
16 | router.post("/", async (req, res) => {
17 | await checkAuth(req.headers);
18 |   const device = await createDevice(req.body);
19 | res.json(device);
20 | });
21 |
22 | router.get("/:id", async (req, res) => {
23 |   await checkAuth(req.headers);
24 |   const device = await getDevice(req.params.id);
25 |   res.json(device);
26 | });
27 |
28 | export default router;
29 |
--------------------------------------------------------------------------------
/src/routes/geojson.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import { getLinksGeoJSON } from "../geojson/links";
4 | import { getNodesGeoJSON } from "../geojson/nodes";
5 | import { checkAuth } from "../auth";
6 |
7 | const router = Router({
8 | caseSensitive: true,
9 | });
10 |
11 | router.get("/links", async (req, res) => {
12 | const geoJSON = await getLinksGeoJSON();
13 | res.json(geoJSON);
14 | });
15 |
16 | router.get("/nodes", async (req, res) => {
17 | const geoJSON = await getNodesGeoJSON();
18 | res.json(geoJSON);
19 | });
20 |
21 | export default router;
22 |
--------------------------------------------------------------------------------
/src/routes/kml.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import { getKML } from "../kml";
4 | import { getAppointmentsKML } from "../kml/appointments";
5 | import { getLosKML } from "../kml/los";
6 | import { getNodesKML } from "../kml/nodes";
7 | import { getRequestsKML } from "../kml/requests";
8 |
9 | const router = Router({
10 | caseSensitive: true,
11 | });
12 |
13 | router.get("/", async (req, res) => {
14 | const kml = await getKML();
15 | res
16 | .set({
17 | "Content-Type": "text/xml",
18 | "Content-Disposition": `attachment; filename="nycmesh.kml"`,
19 | })
20 | .send(kml);
21 | });
22 |
23 | router.get("/appointments", async (req, res) => {
24 | const kml = await getAppointmentsKML(req.params);
25 | res.set("Content-Type", "text/xml").send(kml);
26 | });
27 |
28 | router.get("/los", async (req, res) => {
29 | const kml = await getLosKML(req.params);
30 | res.set("Content-Type", "text/xml").send(kml);
31 | });
32 |
33 | router.get("/nodes", async (req, res) => {
34 | const kml = await getNodesKML(req.params);
35 | res.set("Content-Type", "text/xml").send(kml);
36 | });
37 |
38 | router.get("/requests", async (req, res) => {
39 | const kml = await getRequestsKML(req.params);
40 | res.set("Content-Type", "text/xml").send(kml);
41 | });
42 |
43 | export default router;
44 |
--------------------------------------------------------------------------------
/src/routes/links.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import {
4 | getLinks,
5 | getLink,
6 | createLink,
7 | updateLink,
8 | deleteLink,
9 | } from "../db/links";
10 | import { checkAuth } from "../auth";
11 |
12 | const router = Router({
13 | caseSensitive: true,
14 | });
15 |
16 | router.get("/", async (req, res) => {
17 | const links = await getLinks();
18 | res.json(links);
19 | });
20 |
21 | router.get("/:id", async (req, res) => {
22 | const link = await getLink(req.params.id);
23 | res.json(link);
24 | });
25 |
26 | router.post("/", async (req, res) => {
27 | await checkAuth(req.headers);
28 | const links = await createLink(req.body);
29 | res.json(links);
30 | });
31 |
32 | router.delete("/:id", async (req, res) => {
33 | await checkAuth(req.headers);
34 | const link = await deleteLink(req.params.id);
35 | res.json(link);
36 | });
37 |
38 | export default router;
39 |
--------------------------------------------------------------------------------
/src/routes/los.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import { getLos } from "../db/los";
4 |
5 | const router = Router({
6 | caseSensitive: true,
7 | });
8 |
9 | router.get("/", async (req, res) => {
10 | const los = await getLos(parseInt(req.query.bin));
11 | res.json(los);
12 | });
13 |
14 | export default router;
15 |
--------------------------------------------------------------------------------
/src/routes/map.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import { getMap, authorizedGetMap } from "../db/map";
4 | import { checkAuth } from "../auth";
5 |
6 | const router = Router({
7 | caseSensitive: true,
8 | });
9 |
10 | router.get("/", async (req, res) => {
11 | let map;
12 | try {
13 | await checkAuth(req.headers);
14 | map = await authorizedGetMap();
15 | } catch (error) {
16 | map = await getMap();
17 | }
18 | res.json(map);
19 | });
20 |
21 | export default router;
22 |
--------------------------------------------------------------------------------
/src/routes/members.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import {
4 | getMembers,
5 | getMember,
6 | createMember,
7 | updateMember,
8 | } from "../db/members";
9 | import { authorizedSearchMembers } from "../db/search";
10 | import { checkAuth } from "../auth";
11 |
12 | const router = Router({
13 | caseSensitive: true,
14 | });
15 |
16 | router.get("/", async (req, res) => {
17 | await checkAuth(req.headers);
18 | const members = await getMembers();
19 | res.json(members);
20 | });
21 |
22 | router.get("/search", async (req, res) => {
23 | await checkAuth(req.headers);
24 | const members = await authorizedSearchMembers(req.query.s);
25 | res.json(members);
26 | });
27 |
28 | router.get("/:id", async (req, res) => {
29 | await checkAuth(req.headers);
30 | const member = await getMember(req.params.id);
31 | res.json(member);
32 | });
33 |
34 | export default router;
35 |
--------------------------------------------------------------------------------
/src/routes/memberships.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import {
4 | createMembership,
5 | destroyMembership,
6 | findMembership,
7 | } from "../db/memberships";
8 | import { checkAuth } from "../auth";
9 |
10 | const router = Router({
11 | caseSensitive: true,
12 | });
13 |
14 | router.delete("/:id", async (req, res) => {
15 | await checkAuth(req.headers);
16 | const membership = await destroyMembership(req.params.id);
17 |
18 | if (!membership) {
19 | throw new Error("Not found");
20 | }
21 |
22 | res.json({});
23 | });
24 |
25 | export default router;
26 |
--------------------------------------------------------------------------------
/src/routes/nodes.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import {
4 | getNodes,
5 | getNode,
6 | authorizedGetNode,
7 | createNode,
8 | updateNode,
9 | } from "../db/nodes";
10 | import { findMembership, createMembership } from "../db/memberships";
11 | import { checkAuth } from "../auth";
12 |
13 | const router = Router({
14 | caseSensitive: true,
15 | });
16 |
17 | router.get("/", async (req, res) => {
18 | const nodes = await getNodes();
19 | res.json(nodes);
20 | });
21 |
22 | router.get("/:id", async (req, res) => {
23 | let node;
24 | try {
25 | await checkAuth(req.headers);
26 | node = await authorizedGetNode(req.params.id);
27 | } catch (error) {
28 | node = await getNode(req.params.id);
29 | }
30 | res.json(node);
31 | });
32 |
33 | router.post("/", async (req, res) => {
34 | await checkAuth(req.headers);
35 | const node = await createNode(req.body);
36 | res.json(node);
37 | });
38 |
39 | router.post("/:id", async (req, res) => {
40 | await checkAuth(req.headers);
41 | const node = await updateNode(req.params.id, req.body);
42 | res.json(node);
43 | });
44 |
45 | router.post("/:node_id/memberships", async (req, res) => {
46 | await checkAuth(req.headers);
47 | const membership = await findMembership(
48 | req.params.node_id,
49 | req.body.member_id
50 | );
51 |
52 | if (membership) {
53 | res.status(422).json({
54 | error: "A membership with that node_id and member_id already exists",
55 | });
56 | return;
57 | }
58 |
59 | await createMembership(req.params.node_id, req.body);
60 | const node = await authorizedGetNode(req.params.node_id);
61 | res.json(node);
62 | });
63 |
64 | export default router;
65 |
--------------------------------------------------------------------------------
/src/routes/panos.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import { checkAuth } from "../auth";
4 |
5 | import { createPano, getUploadURL } from "../db/panos";
6 | import { getRequestFromToken } from "../db/requests";
7 | import SlackClient from "../slack/client";
8 |
9 | const slackClient = new SlackClient(process.env.SLACK_TOKEN);
10 |
11 | const router = Router({
12 | caseSensitive: true,
13 | });
14 |
15 | router.post("/upload", async (req, res) => {
16 | if (req.params.token) {
17 | await checkToken(req.params.token);
18 | } else {
19 | await checkAuth(req.headers);
20 | }
21 | const url = await getUploadURL(req.body.name, req.body.type);
22 | res.json({ url });
23 | });
24 |
25 | router.post("/createPano", async (req, res) => {
26 | const params = {
27 | url: req.body.panoURL,
28 | };
29 |
30 | if (req.params.token) {
31 |     const request = await getRequestFromToken(req.params.token);
32 | params.request_id = request.id;
33 | } else {
34 | await checkAuth(req.headers);
35 |     params.request_id = req.body.request_id;
36 | }
37 |
38 | const pano = await createPano(params, slackClient);
39 | res.json(pano);
40 | });
41 |
42 | export default router;
43 |
--------------------------------------------------------------------------------
/src/routes/requests.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import {
4 | getRequests,
5 | getRequest,
6 | createRequest,
7 | updateRequest,
8 | getRequestFromToken,
9 | } from "../db/requests";
10 | import { checkAuth } from "../auth";
11 | import SlackClient from "../slack/client";
12 |
13 | const slackClient = new SlackClient(process.env.SLACK_TOKEN);
14 |
15 | const router = Router({
16 | caseSensitive: true,
17 | });
18 |
19 | router.get("/", async (req, res, next) => {
20 | await checkAuth(req.headers);
21 | const requests = await getRequests();
22 | res.json(requests);
23 | });
24 |
25 | router.get("/:id", async (req, res, next) => {
26 | await checkAuth(req.headers);
27 | const request = await getRequest(req.params.id);
28 | res.json(request);
29 | });
30 |
31 | router.post("/", async (req, res, next) => {
32 | const request = await createRequest(req.body, slackClient);
33 | if (req.body.success_url) {
34 |     res.redirect(200, req.body.success_url);
35 | } else {
36 | res.json(request);
37 | }
38 | });
39 |
40 | router.post("/token/:token", async (req, res, next) => {
41 | const request = await getRequestFromToken(req.params.token);
42 | res.json(request);
43 | });
44 |
45 | export default router;
46 |
--------------------------------------------------------------------------------
/src/routes/search.js:
--------------------------------------------------------------------------------
1 | import { Router } from "express";
2 |
3 | import {
4 | getSearch,
5 | authorizedSearchMembers,
6 | authorizedSearchDeviceTypes,
7 | } from "../db/search";
8 | import { checkAuth } from "../auth";
9 |
10 | const router = Router({
11 | caseSensitive: true,
12 | });
13 |
14 | router.get("/", async (req, res) => {
15 | let results;
16 | try {
17 | await checkAuth(req.headers);
18 | const authorized = true;
19 | results = await getSearch(req.query.s, authorized);
20 | } catch (error) {
21 | results = await getSearch(req.query.s);
22 | }
23 | res.json(results);
24 | });
25 |
26 | export default router;
27 |
--------------------------------------------------------------------------------
/src/routes/webhooks.js:
--------------------------------------------------------------------------------
1 | import express, { Router } from "express";
2 | import Acuity from "acuityscheduling";
3 |
4 | import { acuityWebhook } from "../webhooks/acuity";
5 | import SlackClient from "../slack/client";
6 |
7 | const router = Router({
8 | caseSensitive: true,
9 | });
10 |
11 | const verifyMiddleware = express.urlencoded({
12 | verify: Acuity.bodyParserVerify(process.env.ACUITY_API_KEY),
13 | });
14 |
15 | const slackClient = new SlackClient(process.env.SLACK_TOKEN);
16 |
17 | router.post("/acuity", verifyMiddleware, async (req, res) => {
18 | await acuityWebhook(req.body, slackClient);
19 | res.send({});
20 | });
21 |
22 | export default router;
23 |
--------------------------------------------------------------------------------
/src/slack/client.js:
--------------------------------------------------------------------------------
1 | import { WebClient } from "@slack/web-api";
2 |
3 | export default class Client {
4 | constructor(token) {
5 | this.slack = new WebClient(token);
6 | }
7 |
8 | async getChannel(channelName) {
9 | const { channels } = await this.slack.conversations.list({
10 | types: "public_channel,private_channel",
11 | limit: 1000, // TODO: Cursor support
12 | });
13 | const [channel] = channels.filter((c) => c.name === channelName);
14 | return channel;
15 | }
16 |
17 | async postMessage() {
18 | return this.slack.chat.postMessage(...arguments);
19 | }
20 |
21 | async update() {
22 | return this.slack.chat.update(...arguments);
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/src/slack/index.js:
--------------------------------------------------------------------------------
1 | import { format } from "date-fns";
2 |
3 | const dateFmtString = "EEEE, MMM d h:mm aa";
4 |
5 | export async function requestMessage(
6 | client,
7 | request,
8 | building,
9 | visibleNodes,
10 | buildingNodes
11 | ) {
12 | return sendMessage(
13 | client,
14 | process.env.SLACK_REQUEST_CHANNEL,
15 | requestMessageContent(request, building, visibleNodes, buildingNodes)
16 | );
17 | }
18 |
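// If the request already has a Slack thread (slack_ts), reply in that thread with reply_broadcast; otherwise post a new message to the request channel.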
19 | export async function panoMessage(client, pano, request) {
20 | const messageContent = panoMessageContent(pano, request);
21 | if (request.slack_ts) {
22 | const channel = await client.getChannel(process.env.SLACK_REQUEST_CHANNEL);
23 | return client.postMessage({
24 | channel: channel.id,
25 | thread_ts: request.slack_ts,
26 | reply_broadcast: true,
27 | ...messageContent,
28 | });
29 | } else {
30 | return sendMessage(
31 | client,
32 | process.env.SLACK_REQUEST_CHANNEL,
33 | messageContent
34 | );
35 | }
36 | }
37 |
38 | export async function installMessage(client, appointment) {
39 | const slackRes = await sendMessage(
40 | client,
41 | process.env.SLACK_INSTALL_CHANNEL,
42 | installMessageContent(appointment)
43 | );
44 |
45 | // Reply in thread
46 | const channel = await client.getChannel(process.env.SLACK_INSTALL_CHANNEL);
47 | await client.postMessage({
48 | channel: channel.id,
49 | thread_ts: slackRes.ts,
50 | ...installMessageReplyContent(appointment),
51 | });
52 |
53 | return slackRes;
54 | }
55 |
56 | export async function rescheduleMessage(client, appointment, slackTS) {
57 | const channel = await client.getChannel(process.env.SLACK_INSTALL_CHANNEL);
58 | if (!channel) {
59 |     console.log(`#${process.env.SLACK_INSTALL_CHANNEL} not found`);
60 | return;
61 | }
62 |
63 | const { text, blocks } = installMessageContent(appointment);
64 | const formattedDate = format(appointment.date, dateFmtString);
65 |
66 | await client.update({
67 | channel: channel.id,
68 | ts: slackTS,
69 | text,
70 | blocks,
71 | });
72 |
73 | return client.postMessage({
74 | channel: channel.id,
75 | thread_ts: slackTS,
76 | reply_broadcast: true,
77 | text: `Rescheduled to ${formattedDate}`,
78 | });
79 | }
80 |
81 | async function sendMessage(client, channelName, messageContent) {
82 | const channel = await client.getChannel(channelName);
83 | if (!channel) {
84 | console.log(`#${channelName} not found`);
85 | return;
86 | }
87 |
88 | const { text, blocks } = messageContent;
89 |
90 | return client.postMessage({
91 | channel: channel.id,
92 | text,
93 | blocks,
94 | });
95 | }
96 |
97 | function requestMessageContent(request, building, visibleNodes, buildingNodes) {
98 | const { id, roof_access } = request;
99 | const { address, alt } = building;
100 |
101 | const dashboardURL = getDashboardURL("requests", id);
102 | const titleText = address;
103 | const title = `*<${dashboardURL}|${titleText}>*`;
104 |
105 | const altString = alt
106 | ? `${Math.round(alt * 0.328)}m · `
107 | : "Building not found · ";
108 | const roofString = roof_access ? "Roof access · " : "No roof access · ";
109 | const losString = getLoSString(visibleNodes);
110 | const info = `${altString}${roofString}${losString}`;
111 |
112 | const text = `${title}\n${info}`;
113 | const fallbackText = address;
114 |
115 | const blocks = [markdownSection(text)];
116 |
117 | if (buildingNodes.length) {
118 | blocks.push(contextSection("✅ Node in building!"));
119 | }
120 |
121 | return {
122 | blocks,
123 | text: fallbackText,
124 | };
125 | }
126 |
127 | function panoMessageContent(pano, request) {
128 | const imageText = request.slack_ts
129 | ? "Panorama"
130 | : `Request ${request.id} - ${request.building.address}`;
131 | const blocks = [
132 | {
133 | type: "image",
134 | title: {
135 | type: "plain_text",
136 | text: imageText,
137 | },
138 | image_url: encodeURI(pano.url),
139 | alt_text: imageText,
140 | },
141 | ];
142 |
143 | const text = `New pano for request ${pano.request_id}!`;
144 | return {
145 | blocks,
146 | text,
147 | };
148 | }
149 |
150 | function installMessageContent(appointment) {
151 | const { building, member, request } = appointment;
152 | const formattedDate = format(appointment.date, dateFmtString);
153 | const fallbackText = `${request.id} - ${member.name} - ${appointment.type}\n${formattedDate}\n${building.address}`;
154 | const line1 = ``;
155 | const blocks = [
156 | markdownSection(`${line1}\n${formattedDate}\n${building.address}`),
157 | ];
158 |
159 | return {
160 | blocks,
161 | text: fallbackText,
162 | };
163 | }
164 |
165 | function installMessageReplyContent(appointment) {
166 | const { member } = appointment;
167 | // const introText = `New ${appointment.type}:\n*${building.address}*\n${formattedDate}`;
168 | const nameText = `*Name:*\t${member.name}\n`;
169 |   const phoneText = `*Phone:*\t${member.phone}\n`;
170 | const emailText = `*Email:*\t${member.email}\n`;
171 | const notesText = appointment.notes ? `*Notes:*\t${appointment.notes}\n` : "";
172 |
173 | const fallbackText = `Name: ${member.name}\nPhone: ${member.phone}\nEmail: ${member.email}\nNotes: ${appointment.notes}`;
174 | const blocks = [
175 | markdownSection(`${nameText}${phoneText}${emailText}${notesText}`),
176 | ];
177 |
178 | return {
179 | blocks,
180 | text: fallbackText,
181 | };
182 | }
183 |
184 | function markdownSection(text) {
185 | return {
186 | type: "section",
187 | text: {
188 | type: "mrkdwn",
189 | text,
190 | },
191 | };
192 | }
193 |
194 | function contextSection(text) {
195 | return {
196 | type: "context",
197 | elements: [
198 | {
199 | type: "mrkdwn",
200 | text,
201 | },
202 | ],
203 | };
204 | }
205 |
206 | function getLoSString(visibleNodes) {
207 | if (!visibleNodes) {
208 | return "LoS search failed";
209 | }
210 |
211 | if (!visibleNodes.length) {
212 | return "No LoS";
213 | }
214 |
215 | const isKnownDevice = (device) => device.type.name !== "Unknown";
216 | const hasDevice = (node) => node.devices.filter(isKnownDevice).length;
217 | const toIdentifier = (node) => node.name || node.id;
218 | return visibleNodes.filter(hasDevice).map(toIdentifier).join(", ");
219 | }
220 |
221 | function getDashboardURL(type, id) {
222 | return `https://dashboard.nycmesh.net/map/${type}/${id}`;
223 | }
224 |
225 | function getMapURL(id) {
226 | return `https://www.nycmesh.net/map/nodes/${id}`;
227 | }
228 |
229 | function getEarthURL(building) {
230 | const { address, lat, lng, alt } = building;
231 | const earthAddress = address.replace(/,/g, "").replace(/ /g, "+");
232 | return `https://earth.google.com/web/search/${earthAddress}/@${lat},${lng},${alt}a,300d,40y,0.6h,65t,0r`;
233 | }
234 |
235 | function getLosURL(building) {
236 | const { address, bin, lat, lng } = building;
237 | const URIAddress = encodeURIComponent(address);
238 | return `https://los.nycmesh.net/search?address=${URIAddress}&bin=${bin}&lat=${lat}&lng=${lng}`;
239 | }
240 |
241 | function getTicketURL(id) {
242 | return `https://support.nycmesh.net/scp/tickets.php?a=search&query=${id}`;
243 | }
244 |
--------------------------------------------------------------------------------
/src/slack/index.spec.js:
--------------------------------------------------------------------------------
1 | import {
2 | requestMessage,
3 | panoMessage,
4 | installMessage,
5 | rescheduleMessage,
6 | } from ".";
7 |
8 | describe("requestMessage", () => {
9 | describe("if the member has roof access", () => {
10 | it("sends a message to the join requests channel", async () => {
11 | const slackClient = mockSlackClient();
12 | slackClient.getChannel.mockResolvedValue({
13 | name: process.env.SLACK_REQUEST_CHANNEL,
14 | id: 1,
15 | });
16 |
17 | const request = { id: 4321, roof_access: true };
18 | const building = {
19 | address: "123 4th Street",
20 | alt: 300,
21 | lat: 32.1234542,
22 | lng: 188.029342,
23 | bin: "F4SF0J32",
24 | };
25 | const visibleNodes = [
26 | { devices: [{ type: { name: "LBE" } }], id: 5544 },
27 | { devices: [{ type: { name: "OmniTik" } }], id: 312 },
28 | ];
29 |
30 | const buildingNodes = [];
31 |
32 | await requestMessage(
33 | slackClient,
34 | request,
35 | building,
36 | visibleNodes,
37 | buildingNodes
38 | );
39 |
40 | expect(slackClient.getChannel).toHaveBeenCalledWith(
41 | process.env.SLACK_REQUEST_CHANNEL
42 | );
43 | expect(slackClient.postMessage).toHaveBeenCalled();
44 | const {
45 | channel,
46 | text,
47 | blocks,
48 | } = slackClient.postMessage.mock.calls[0][0];
49 | expect(channel).toBe(1);
50 | expect(text).toBe("123 4th Street");
51 | const lines = blocks[0].text.text.split("\n");
52 | expect(lines).toHaveLength(2);
53 |       expect(lines[0]).toBe(
54 |         "*<https://dashboard.nycmesh.net/map/requests/4321|123 4th Street>*"
55 |       );
56 | expect(lines[1]).toBe("98m · Roof access · 5544, 312");
57 | });
58 | });
59 |
60 | describe("if the member does not have roof access", () => {
61 | it("sends a message to the join requests channel", async () => {
62 | const slackClient = mockSlackClient();
63 | slackClient.getChannel.mockResolvedValue({
64 | name: process.env.SLACK_REQUEST_CHANNEL,
65 | id: 1,
66 | });
67 |
68 | const request = { roof_access: false };
69 | const building = { address: "123 4th Street" };
70 |
71 | await requestMessage(slackClient, request, building, [], []);
72 |
73 | expect(slackClient.postMessage).toHaveBeenCalled();
74 | const { blocks } = slackClient.postMessage.mock.calls[0][0];
75 | expect(blocks[0].text.text).toContain("No roof access");
76 | });
77 | });
78 |
79 | describe("if there are no visible nodes", () => {
80 | it("sends a message to the join requests channel", async () => {
81 | const slackClient = mockSlackClient();
82 | slackClient.getChannel.mockResolvedValue({
83 | name: process.env.SLACK_REQUEST_CHANNEL,
84 | id: 1,
85 | });
86 |
87 | const building = { address: "123 4th Street" };
88 |
89 | await requestMessage(slackClient, {}, building, [], []);
90 |
91 | expect(slackClient.postMessage).toHaveBeenCalled();
92 | const { blocks } = slackClient.postMessage.mock.calls[0][0];
93 | expect(blocks[0].text.text).toContain("No LoS");
94 | });
95 | });
96 |
97 | describe("if visible nodes is null", () => {
98 | it("sends a message to the join requests channel", async () => {
99 | const slackClient = mockSlackClient();
100 | slackClient.getChannel.mockResolvedValue({
101 | name: process.env.SLACK_REQUEST_CHANNEL,
102 | id: 1,
103 | });
104 |
105 | const building = { address: "123 4th Street" };
106 |
107 | await requestMessage(slackClient, {}, building, null, []);
108 |
109 | expect(slackClient.postMessage).toHaveBeenCalled();
110 | const { blocks } = slackClient.postMessage.mock.calls[0][0];
111 | expect(blocks[0].text.text).toContain("LoS search failed");
112 | });
113 | });
114 |
115 | describe("if building nodes is not empty", () => {
116 | it("adds node in building to the slack message", async () => {
117 | const slackClient = mockSlackClient();
118 | slackClient.getChannel.mockResolvedValue({
119 | name: process.env.SLACK_REQUEST_CHANNEL,
120 | id: 1,
121 | });
122 |
123 | const building = { address: "123 4th Street" };
124 | const buildingNodes = [{ id: 123 }];
125 |
126 | await requestMessage(slackClient, {}, building, [], buildingNodes);
127 |
128 | expect(slackClient.postMessage).toHaveBeenCalled();
129 | const { blocks } = slackClient.postMessage.mock.calls[0][0];
130 | expect(blocks[1].elements[0].text).toEqual("✅ Node in building!");
131 | });
132 | });
133 |
134 | describe("if building nodes is empty", () => {
135 | it("adds node in building to the slack message", async () => {
136 | const slackClient = mockSlackClient();
137 | slackClient.getChannel.mockResolvedValue({
138 | name: process.env.SLACK_REQUEST_CHANNEL,
139 | id: 1,
140 | });
141 |
142 | const building = { address: "123 4th Street" };
143 | const buildingNodes = [];
144 |
145 | await requestMessage(slackClient, {}, building, [], buildingNodes);
146 |
147 | expect(slackClient.postMessage).toHaveBeenCalled();
148 | const { blocks } = slackClient.postMessage.mock.calls[0][0];
149 | expect(blocks.length).toEqual(1);
150 | });
151 | });
152 |
153 | describe("if the channel is not found", () => {
154 | it("does not send a message", async () => {
155 | const slackClient = mockSlackClient();
156 | slackClient.getChannel.mockResolvedValue(null);
157 | const consoleLog = console.log;
158 | console.log = jest.fn();
159 |
160 | await requestMessage(slackClient, {}, { address: "" }, [], []);
161 |
162 | expect(slackClient.postMessage).not.toHaveBeenCalled();
163 |
164 | console.log = consoleLog;
165 | });
166 | });
167 | });
168 |
169 | describe("panoMessage", () => {
170 | it("sends a message to the panoramas channel", async () => {
171 | const slackClient = mockSlackClient();
172 | slackClient.getChannel.mockResolvedValue({
173 | name: process.env.SLACK_REQUEST_CHANNEL,
174 | id: 2,
175 | });
176 | const url = "https://example.com";
177 | const requestId = 1;
178 |
179 | await panoMessage(
180 | slackClient,
181 | { url, request_id: requestId },
182 | { id: requestId, slack_ts: "123" }
183 | );
184 |
185 | expect(slackClient.getChannel).toHaveBeenCalledWith(
186 | process.env.SLACK_REQUEST_CHANNEL
187 | );
188 | expect(slackClient.postMessage).toHaveBeenCalled();
189 | const { channel, text, blocks } = slackClient.postMessage.mock.calls[0][0];
190 | expect(channel).toBe(2);
191 | expect(text).toBe("New pano for request 1!");
192 | expect(blocks[0].image_url).toBe("https://example.com");
193 | });
194 | });
195 |
196 | describe("installMessage", () => {
197 | it("sends a message to the install channel", async () => {
198 | const slackClient = mockSlackClient();
199 | slackClient.getChannel.mockResolvedValue({
200 | name: process.env.SLACK_INSTALL_CHANNEL,
201 | id: 3,
202 | });
203 | slackClient.postMessage.mockResolvedValue({ ts: 1234 });
204 |
205 | const appointment = {
206 | id: 12345,
207 | building: {
208 | address: "567 8th Street",
209 | alt: 250,
210 | lat: 91.423,
211 | lng: 11.121,
212 | bin: "8FS3",
213 | },
214 | member: {
215 | name: "First Last",
216 | phone: "800-555-5555",
217 | email: "first@last.com",
218 | },
219 | request: {
220 | id: 123,
221 | },
222 | date: 946713600000,
223 | request_id: 6678,
224 | node_id: 6678,
225 | type: "install",
226 | notes: "Omni only",
227 | };
228 |
229 | await installMessage(slackClient, appointment);
230 |
231 | expect(slackClient.getChannel).toHaveBeenCalledWith(
232 | process.env.SLACK_INSTALL_CHANNEL
233 | );
234 | const { channel, blocks, text } = slackClient.postMessage.mock.calls[0][0];
235 | expect(channel).toBe(3);
236 | expect(blocks).toHaveLength(1);
237 | expect(blocks[0].text.text).toBe(
238 | "\nSaturday, Jan 1 8:00 AM\n567 8th Street"
239 | );
240 | expect(text).toBe(
241 | "123 - First Last - install\nSaturday, Jan 1 8:00 AM\n567 8th Street"
242 | );
243 | });
244 | });
245 |
246 | describe("rescheduleMessage", () => {
247 | it("updates the original message in the install channel", async () => {
248 | const slackClient = mockSlackClient();
249 | slackClient.getChannel.mockResolvedValue({
250 | name: process.env.SLACK_INSTALL_CHANNEL,
251 | id: 3,
252 | });
253 |
254 | const appointment = {
255 | id: 12345,
256 | building: { address: "567 8th Street" },
257 | member: {
258 | name: "First Last",
259 | phone: "800-555-5555",
260 | email: "first@last.com",
261 | },
262 | request: {
263 | id: 123,
264 | },
265 | date: 946713600000,
266 | type: "survey",
267 | };
268 |
269 | await rescheduleMessage(slackClient, appointment, 2394587345);
270 |
271 | expect(slackClient.getChannel).toHaveBeenCalledWith(
272 | process.env.SLACK_INSTALL_CHANNEL
273 | );
274 | expect(slackClient.update).toHaveBeenCalled();
275 | const { channel, ts, blocks, text } = slackClient.update.mock.calls[0][0];
276 | expect(channel).toBe(3);
277 | expect(ts).toBe(2394587345);
278 | expect(blocks).toHaveLength(1);
279 | expect(text).toBe(
280 | "123 - First Last - survey\nSaturday, Jan 1 8:00 AM\n567 8th Street"
281 | );
282 | });
283 |
284 | it("posts a rescheduling message in a thread on the original message", async () => {
285 | const slackClient = mockSlackClient();
286 | slackClient.getChannel.mockResolvedValue({
287 | name: process.env.SLACK_INSTALL_CHANNEL,
288 | id: 3,
289 | });
290 |
291 | const appointment = {
292 | id: 12345,
293 | building: { address: "567 8th Street" },
294 | member: {
295 | name: "First Last",
296 | phone: "800-555-5555",
297 | email: "first@last.com",
298 | },
299 | request: {
300 | id: 123,
301 | },
302 | date: 946713600000,
303 | };
304 |
305 | await rescheduleMessage(slackClient, appointment, 2394587345);
306 |
307 | expect(slackClient.postMessage).toHaveBeenCalled();
308 | const {
309 | channel,
310 | thread_ts,
311 | reply_broadcast,
312 | text,
313 | } = slackClient.postMessage.mock.calls[0][0];
314 | expect(channel).toBe(3);
315 | expect(thread_ts).toBe(2394587345);
316 | expect(reply_broadcast).toBe(true);
317 | expect(text).toBe("Rescheduled to Saturday, Jan 1 8:00 AM");
318 | });
319 | });
320 |
321 | function mockSlackClient() {
322 | return {
323 | getChannel: jest.fn(),
324 | postMessage: jest.fn(),
325 | update: jest.fn(),
326 | };
327 | }
328 |
--------------------------------------------------------------------------------
/src/v1.js:
--------------------------------------------------------------------------------
1 | import express, { Router } from "express";
2 | import cors from "cors";
3 | import serverless from "serverless-http";
4 |
5 | import appointments from "./routes/appointments";
6 | import buildings from "./routes/buildings";
7 | import device_types from "./routes/device_types";
8 | import devices from "./routes/devices";
9 | import links from "./routes/links";
10 | import los from "./routes/los";
11 | import members from "./routes/members";
12 | import memberships from "./routes/memberships";
13 | import map from "./routes/map";
14 | import nodes from "./routes/nodes";
15 | import panos from "./routes/panos";
16 | import requests from "./routes/requests";
17 | import search from "./routes/search";
18 | import kml from "./routes/kml";
19 | import geojson from "./routes/geojson";
20 | import webhooks from "./routes/webhooks";
21 |
22 | const ROOT = "/v1";
23 | const app = express();
24 | const router = Router({ caseSensitive: true });
25 |
26 | app.set("etag", false);
27 | app.disable("x-powered-by");
28 | app.use(cors());
29 | app.use(express.json());
30 | app.use(express.urlencoded({ extended: true }));
31 | app.use(ROOT, router);
32 | app.use(handleErrors);
33 |
34 | router.use("/appointments", appointments);
35 | router.use("/buildings", buildings);
36 | router.use("/device_types", device_types);
37 | router.use("/devices", devices);
38 | router.use("/geojson", geojson);
39 | router.use("/kml", kml);
40 | router.use("/links", links);
41 | router.use("/los", los);
42 | router.use("/map", map);
43 | router.use("/members", members);
44 | router.use("/memberships", memberships);
45 | router.use("/nodes", nodes);
46 | router.use("/panos", panos);
47 | router.use("/requests", requests);
48 | router.use("/search", search);
49 | router.use("/webhooks", webhooks);
50 |
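// Error-handling middleware: redirect form submissions that provided a failure_url, otherwise map known error messages to HTTP statuses (500 by default).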
51 | function handleErrors(error, req, res, next) {
52 | if (req.body.failure_url) {
53 | res.redirect(303, req.body.failure_url);
54 | } else {
55 | const messageStatus = {
56 | Unauthorized: 401,
57 | "Bad params": 422,
58 |     "Not found": 404,
59 | };
60 | const status = messageStatus[error.message] || 500;
61 | res.status(status).json({ error: error.message });
62 | }
63 | }
64 |
65 | export const handler = serverless(app);
66 |
--------------------------------------------------------------------------------
/src/webhooks/acuity.js:
--------------------------------------------------------------------------------
1 | import fetch from "node-fetch";
2 |
3 | import {
4 | getAppointment,
5 | getAppointmentByAcuityId,
6 | createAppointment,
7 | updateAppointment,
8 | } from "../db/appointments";
9 | import { getRequest } from "../db/requests";
10 | import { installMessage, rescheduleMessage } from "../slack";
11 |
12 | export async function acuityWebhook(body, slackClient) {
13 | const { action, id } = body;
14 |
15 | const acuityAppointment = await getAcuityAppointment(id);
16 |
17 | if (!acuityAppointment) {
18 | throw new Error(`Acuity appointment ${id} not found`);
19 | }
20 |
21 | let request_id, member_id, building_id, notes;
22 | try {
23 | const { values } = acuityAppointment.forms[0];
24 |     request_id = parseInt(values.filter((v) => v.name === "Node Number")[0].value);
25 | const request = await getRequest(request_id);
26 | member_id = request.member.id;
27 | building_id = request.building.id;
28 |     notes = String(values.filter((v) => v.name === "Notes")[0].value);
29 | } catch (error) {
30 | console.log("Unable to find request", request_id);
31 | }
32 |
33 | if (action === "scheduled") {
34 | const sanitizedType = sanitizeType(acuityAppointment.type);
35 |
36 | // Create appointment in db
37 |     const newAppointment = await createAppointment({
38 | type: sanitizedType,
39 | date: acuityAppointment.date,
40 | notes,
41 | request_id,
42 | member_id,
43 | building_id,
44 | acuity_id: id,
45 | });
46 |
47 |     const fullAppointment = await getAppointment(newAppointment.id);
48 |
49 | // Send message to slack
50 | const slackRes = await installMessage(slackClient, fullAppointment);
51 |
52 | // Save slack message ts to db
53 | await updateAppointment({ ...fullAppointment, slack_ts: slackRes.ts });
54 | } else if (action === "rescheduled") {
55 | const appointment = await getAppointmentByAcuityId(id);
56 | await updateAppointment({
57 | ...appointment,
58 | date: acuityAppointment.date,
59 | notes,
60 | });
61 | const updatedAppointment = await getAppointmentByAcuityId(id);
62 |
63 | // Update slack message, post to thread + channel
64 | await rescheduleMessage(
65 | slackClient,
66 | updatedAppointment,
67 | updatedAppointment.slack_ts
68 | );
69 | } else if (action === "canceled") {
70 | // Fetch slack message id from db
71 | // Update slack message
72 | // Post update to thread + channel
73 | } else if (action === "changed") {
74 | // Fetch slack message id from db
75 | // Update slack message
76 | // Post update to thread + channel
77 | }
78 | }
79 |
80 | async function getAcuityAppointment(id) {
81 | const URL = `https://acuityscheduling.com/api/v1/appointments/${id}`;
82 | const userPass = `${process.env.ACUITY_USER_ID}:${process.env.ACUITY_API_KEY}`;
83 | const passBuffer = Buffer.from(userPass);
84 | const pass64 = passBuffer.toString("base64");
85 | const auth = `Basic ${pass64}`;
86 | const headers = { Authorization: auth };
87 | const res = await fetch(URL, { headers });
88 | return res.json();
89 | }
90 |
91 | function sanitizeType(type) {
92 | const typeMap = {
93 | Install: "install",
94 | Support: "support",
95 | "Site survey": "survey",
96 | };
97 | return typeMap[type];
98 | }
99 |
--------------------------------------------------------------------------------
/webpack.functions.js:
--------------------------------------------------------------------------------
1 | const webpack = require("webpack");
2 | const Dotenv = require("dotenv-webpack");
3 |
4 | module.exports = {
5 | plugins: [new Dotenv(), new webpack.IgnorePlugin(/^pg-native$/)],
6 | };
7 |
--------------------------------------------------------------------------------