├── VERSION ├── .rspec ├── .document ├── Gemfile ├── spec ├── spec_helper.rb └── netsuite-rest-client_spec.rb ├── Gemfile.lock ├── .gitignore ├── LICENSE.txt ├── Rakefile ├── netsuite-rest-client.gemspec ├── README.rdoc └── lib ├── netsuite-rest-client.rb └── restlets └── rest.js /VERSION: -------------------------------------------------------------------------------- 1 | 0.1.0 -------------------------------------------------------------------------------- /.rspec: -------------------------------------------------------------------------------- 1 | --color 2 | -------------------------------------------------------------------------------- /.document: -------------------------------------------------------------------------------- 1 | lib/**/*.rb 2 | bin/* 3 | - 4 | features/**/*.feature 5 | LICENSE.txt 6 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | source "http://rubygems.org" 2 | # Add dependencies required to use your gem here. 3 | # Example: 4 | # gem "activesupport", ">= 2.3.5" 5 | gem 'rest-client' 6 | gem 'json' 7 | 8 | # Add dependencies to develop your gem here. 9 | # Include everything needed to run rake, tests, features, etc. 10 | group :development do 11 | gem "rspec", "~> 2.4" 12 | gem "bundler", ">= 1.0.0" 13 | gem "jeweler", "~> 1.8.3" 14 | end 15 | -------------------------------------------------------------------------------- /spec/spec_helper.rb: -------------------------------------------------------------------------------- 1 | $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib')) 2 | $LOAD_PATH.unshift(File.dirname(__FILE__)) 3 | require 'rspec' 4 | require 'netsuite-rest-client' 5 | 6 | # Requires supporting files with custom matchers and macros, etc, 7 | # in ./support/ and its subdirectories. 8 | Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each {|f| require f} 9 | 10 | RSpec.configure do |config| 11 | 12 | end 13 | -------------------------------------------------------------------------------- /Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 | remote: http://rubygems.org/ 3 | specs: 4 | diff-lcs (1.2.5) 5 | git (1.2.5) 6 | jeweler (1.8.3) 7 | bundler (~> 1.0) 8 | git (>= 1.2.5) 9 | rake 10 | rdoc 11 | json (1.6.3) 12 | mime-types (1.17.2) 13 | rake (0.9.2.2) 14 | rdoc (3.12) 15 | json (~> 1.4) 16 | rest-client (1.6.7) 17 | mime-types (>= 1.16) 18 | rspec (2.14.1) 19 | rspec-core (~> 2.14.0) 20 | rspec-expectations (~> 2.14.0) 21 | rspec-mocks (~> 2.14.0) 22 | rspec-core (2.14.8) 23 | rspec-expectations (2.14.5) 24 | diff-lcs (>= 1.1.3, < 2.0) 25 | rspec-mocks (2.14.6) 26 | 27 | PLATFORMS 28 | ruby 29 | 30 | DEPENDENCIES 31 | bundler (>= 1.0.0) 32 | jeweler (~> 1.8.3) 33 | json 34 | rest-client 35 | rspec (~> 2.4) 36 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # rcov generated 2 | coverage 3 | 4 | # rdoc generated 5 | rdoc 6 | 7 | # yard generated 8 | doc 9 | .yardoc 10 | 11 | # bundler 12 | .bundle 13 | 14 | # jeweler generated 15 | pkg 16 | 17 | # Have editor/IDE/OS specific files you need to ignore? 
Consider using a global gitignore: 18 | # 19 | # * Create a file at ~/.gitignore 20 | # * Include files you want ignored 21 | # * Run: git config --global core.excludesfile ~/.gitignore 22 | # 23 | # After doing this, these files will be ignored in all your git projects, 24 | # saving you from having to 'pollute' every project you touch with them 25 | # 26 | # Not sure what to needs to be ignored for particular editors/OSes? Here's some ideas to get you started. (Remember, remove the leading # of the line) 27 | # 28 | # For MacOS: 29 | # 30 | .DS_Store 31 | 32 | # For TextMate 33 | #*.tmproj 34 | #tmtags 35 | 36 | # For emacs: 37 | *~ 38 | #\#* 39 | #.\#* 40 | 41 | # For vim: 42 | *.swp 43 | 44 | # For redcar: 45 | #.redcar 46 | 47 | # For rubinius: 48 | #*.rbc 49 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2012 Acumen Brands, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining 4 | a copy of this software and associated documentation files (the 5 | "Software"), to deal in the Software without restriction, including 6 | without limitation the rights to use, copy, modify, merge, publish, 7 | distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do so, subject to 9 | the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be 12 | included in all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
21 | -------------------------------------------------------------------------------- /spec/netsuite-rest-client_spec.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path(File.dirname(__FILE__) + '/spec_helper') 2 | 3 | describe "NetsuiteRestClient" do 4 | 5 | describe '#get_saved_search' do 6 | it "should get a saved search" do 7 | nsc = Netsuite::Client.new(ENV['NETSUITE_ACCOUNT_ID'], 8 | ENV['NETSUITE_LOGIN'], 9 | ENV['NETSUITE_PASSWORD'], 10 | ENV['NETSUITE_ROLE_ID']) 11 | res = nsc.get_saved_search('InventoryItem', '678') 12 | res.should_not be_empty 13 | res.should be_kind_of(Array) 14 | res.first.should be_kind_of(Hash) 15 | puts "returned result of #{res.count} rows" 16 | end 17 | end 18 | 19 | describe '#get_large_saved_search' do 20 | it "should get a saved search" do 21 | nsc = Netsuite::Client.new(ENV['NETSUITE_ACCOUNT_ID'], 22 | ENV['NETSUITE_LOGIN'], 23 | ENV['NETSUITE_PASSWORD'], 24 | ENV['NETSUITE_ROLE_ID']) 25 | res = nsc.get_large_saved_search('InventoryItem', '678') 26 | res.should_not be_empty 27 | res.should be_kind_of(Array) 28 | res.first.should be_kind_of(Hash) 29 | puts "returned result of #{res.count} rows" 30 | end 31 | end 32 | 33 | end 34 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | require 'rubygems' 4 | require 'bundler' 5 | begin 6 | Bundler.setup(:default, :development) 7 | rescue Bundler::BundlerError => e 8 | $stderr.puts e.message 9 | $stderr.puts "Run `bundle install` to install missing gems" 10 | exit e.status_code 11 | end 12 | require 'rake' 13 | 14 | require 'jeweler' 15 | Jeweler::Tasks.new do |gem| 16 | # gem is a Gem::Specification... see http://docs.rubygems.org/read/chapter/20 for more options 17 | gem.name = "netsuite-rest-client" 18 | gem.homepage = "http://github.com/jkaneacumen/netsuite-rest-client" 19 | gem.license = "MIT" 20 | gem.summary = %Q{RESTlet-based client for Netsuite} 21 | gem.description = %Q{RESTlet-based client for Netsuite} 22 | gem.email = "jkane@acumenholdings.com" 23 | gem.authors = ["Jim Kane"] 24 | # dependencies defined in Gemfile 25 | end 26 | Jeweler::RubygemsDotOrgTasks.new 27 | 28 | require 'rspec/core' 29 | require 'rspec/core/rake_task' 30 | RSpec::Core::RakeTask.new(:spec) do |spec| 31 | spec.pattern = FileList['spec/**/*_spec.rb'] 32 | end 33 | 34 | RSpec::Core::RakeTask.new(:rcov) do |spec| 35 | spec.pattern = 'spec/**/*_spec.rb' 36 | spec.rcov = true 37 | end 38 | 39 | task :default => :spec 40 | 41 | require 'rake/rdoctask' 42 | Rake::RDocTask.new do |rdoc| 43 | version = File.exist?('VERSION') ? File.read('VERSION') : "" 44 | 45 | rdoc.rdoc_dir = 'rdoc' 46 | rdoc.title = "netsuite-rest-client #{version}" 47 | rdoc.rdoc_files.include('README*') 48 | rdoc.rdoc_files.include('lib/**/*.rb') 49 | end 50 | -------------------------------------------------------------------------------- /netsuite-rest-client.gemspec: -------------------------------------------------------------------------------- 1 | # Generated by jeweler 2 | # DO NOT EDIT THIS FILE DIRECTLY 3 | # Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec' 4 | # -*- encoding: utf-8 -*- 5 | 6 | Gem::Specification.new do |s| 7 | s.name = "netsuite-rest-client" 8 | s.version = "1.0.2" 9 | 10 | s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? 
:required_rubygems_version=
11 | s.authors = ["Jim Kane"]
12 | s.date = "2012-01-25"
13 | s.description = "RESTlet-based client for Netsuite"
14 | s.email = "jkane@acumenholdings.com"
15 | s.extra_rdoc_files = [
16 | "LICENSE.txt",
17 | "README.rdoc"
18 | ]
19 | s.files = [
20 | ".document",
21 | ".rspec",
22 | "Gemfile",
23 | "Gemfile.lock",
24 | "LICENSE.txt",
25 | "README.rdoc",
26 | "Rakefile",
27 | "VERSION",
28 | "lib/netsuite-rest-client.rb",
29 | "lib/restlets/rest.js",
30 | "netsuite-rest-client.gemspec",
31 | "spec/netsuite-rest-client_spec.rb",
32 | "spec/spec_helper.rb"
33 | ]
34 | s.homepage = "http://github.com/jkaneacumen/netsuite-rest-client"
35 | s.licenses = ["MIT"]
36 | s.require_paths = ["lib"]
37 | s.rubygems_version = "1.8.11"
38 | s.summary = "RESTlet-based client for Netsuite"
39 |
40 | if s.respond_to? :specification_version then
41 | s.specification_version = 3
42 |
43 | if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
44 | s.add_runtime_dependency(%q<rest-client>, [">= 0"])
45 | s.add_runtime_dependency(%q<json>, [">= 0"])
46 | s.add_development_dependency(%q<rspec>, ["~> 2.4"])
47 | s.add_development_dependency(%q<bundler>, [">= 1.0.0"])
48 | s.add_development_dependency(%q<jeweler>, ["~> 1.8.3"])
49 | else
50 | s.add_dependency(%q<rest-client>, [">= 0"])
51 | s.add_dependency(%q<json>, [">= 0"])
52 | s.add_dependency(%q<rspec>, ["~> 2.4"])
53 | s.add_dependency(%q<bundler>, [">= 1.0.0"])
54 | s.add_dependency(%q<jeweler>, ["~> 1.8.3"])
55 | end
56 | else
57 | s.add_dependency(%q<rest-client>, [">= 0"])
58 | s.add_dependency(%q<json>, [">= 0"])
59 | s.add_dependency(%q<rspec>, ["~> 2.4"])
60 | s.add_dependency(%q<bundler>, [">= 1.0.0"])
61 | s.add_dependency(%q<jeweler>, ["~> 1.8.3"])
62 | end
63 | end
64 |
65 |
--------------------------------------------------------------------------------
/README.rdoc:
--------------------------------------------------------------------------------
1 | == Netsuite Rest Client
2 |
3 | *NOTE:* This project is being replaced by rest_suite[https://github.com/acumenbrands/rest_suite/].
4 | If you're starting a new system, please use that codebase. We will keep netsuite-rest-client on
5 | life support for the time being.
6 |
7 | This is intended to be a RESTlet-based client for Netsuite. It
8 | should offer a significant speed advantage over the normal
9 | SOAP-based connection scheme (and it should cost less to operate
10 | since it allows 50 simultaneous connections on an ordinary
11 | license).
12 |
13 | == Contributing to netsuite-rest-client
14 |
15 | * Check out the latest master to make sure the feature hasn't been implemented or the bug hasn't been fixed yet
16 | * Check out the issue tracker to make sure someone hasn't already requested it and/or contributed it
17 | * Fork the project
18 | * Start a feature/bugfix branch
19 | * Commit and push until you are happy with your contribution
20 | * Make sure to add tests for it. This is important so I don't break it in a future version unintentionally.
21 | * Please try not to mess with the Rakefile, version, or history. If you want to have your own version, or it is otherwise necessary, that is fine, but please isolate the change to its own commit so I can cherry-pick around it.
22 | * If you're interested in the future of netsuite-rest-client, take a look at rest_suite[https://github.com/acumenbrands/rest_suite/].
23 |
24 | == Copyright
25 |
26 | Copyright (c) 2012 Acumen Brands, Inc. See LICENSE.txt for
27 | further details.
28 |
29 | == Method Documentation
30 |
31 | Generally, all arguments are ultimately strings as they are placed into JSON or URI
32 | requests.
However, the client is capable of accepting Fixnum objects and others, so long
33 | as the data type includes a to_s method. If a specific data type is expected, it will be
34 | declared in the params section.
35 |
36 | Returns are almost always a hash parsed from a JSON casting of the server-side object.
37 |
38 | = initialize
39 | Params:
40 | account_id: business account id
41 | login: account login associated with the business id
42 | password: password for the login
43 | role_id: id of the role for the client to assume
44 |
45 | Return:
46 | A new client object with the given parameters
47 |
48 | Example:
49 | Netsuite::Client.new('123456', 'email@domain.suffix', 'passw0rd', '17')
50 |
51 | = initialize_record
52 | Params:
53 | record_type: name of the record type
54 |
55 | Return:
56 | A hash of field names with default values for a Netsuite record of the given type
57 |
58 | Example:
59 | initialize_record('InventoryItem')
60 |
61 | = get_record
62 | Params:
63 | record_type: name of the record type
64 | internal_id: the internal id of the Netsuite record (a single id or an array of ids)
65 |
66 | Return:
67 | A hash of field names populated with values from the record matching the internal id
68 |
69 | Example:
70 | get_record('InventoryItem', '233')
71 |
72 | = search_records
73 | Params:
74 | record_type: name of the record type
75 | search_filters: a hash of fields, each associated with a hash of the desired value and the operator for comparison
76 | return_columns: a hash of field names associated with field JOIN requests
77 | options:
78 | :search_batch_size - (String/Fixnum) sets the maximum number of records to fetch in a single request
79 | :verbose - (Boolean) prints the status of the search after each execution while fetching records
80 |
81 | Return:
82 | An array of record hashes
83 |
84 | Example:
85 | search_records('InventoryItem',
86 | {'displayName' => {'value'=>'PRODUCTSKU', 'operator'=>'is'}},
87 | {'displayName' => nil, 'custitem22' => nil})
88 |
89 | = upsert
90 | Params:
91 | record_type: name of the record type
92 | record_data: an array of hashes of fields associated with the desired values
93 | options:
94 | :batch_size - (String/Fixnum) size of the batch to be written in a single request
95 | :do_sourcing - (Boolean) enables or disables sourcing in nlapiSubmitRecord calls
96 | :ignore_mandatory - (Boolean) ignore mandatory fields
97 |
98 | ** If an id is provided as a field, it will update the record with the given id or fail to write;
99 | ** otherwise it creates a new record with the given values.
100 |
101 | Return:
102 | An array of responses for each requested write. Each response is the internal id of the newly created or updated
103 | record paired with the parameters hash used to create or update the record. In the event of an error, the id
104 | will be replaced by the exception caught upon write.
105 |
106 | Example:
107 | upsert('InventoryItem',
108 | [{'id' => {'value'=>'1234567', 'operator'=>'is'},
109 | 'displayName' => {'value'=>'PRODUCTSKU', 'operator'=>'is'}}])
110 |
111 | = delete
112 | Params:
113 | record_type: name of the record type
114 | internal_ids: an array of internal record ids
115 | options:
116 | :batch_size - (String/Fixnum) size of the batch to be written in a single request
117 |
118 | Return:
119 | An array of response lists. Each record response will be the internal id paired with *false* in the event
120 | of success, or an exception in the event of a deletion failure.
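
For illustration only (a sketch based on the Return description above; +nsc+ stands in for a
client built with Netsuite::Client.new as shown earlier, and the flat id/outcome pairing of the
response is an assumption drawn from the RESTlet's delete handler), a caller might walk the
responses like this:

  responses = nsc.delete('InventoryItem', ['1', '2356', '584309'])
  responses.each_slice(2) do |internal_id, outcome|
    if outcome == false
      puts "deleted #{internal_id}"
    else
      puts "failed to delete #{internal_id}: #{outcome.inspect}"
    end
  end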
121 | 122 | Example: 123 | delete('InventoryItem', ['1', '2356', '584309']) 124 | 125 | = get_saved_search 126 | Params: 127 | record_type: name of the record type the search will return 128 | search_id: id of the saved search 129 | options: 130 | :search_batch_size - (String/Fixnum) set a max size of record to fetch in a single request 131 | :verbose - (Boolean) prints status of search after each execution while fetching records 132 | 133 | Return: 134 | A list of record hashes corresponding to each row of the search results. 135 | 136 | Example: 137 | get_saved_search('InventoryItem', '726') 138 | -------------------------------------------------------------------------------- /lib/netsuite-rest-client.rb: -------------------------------------------------------------------------------- 1 | require 'rest-client' 2 | require 'json' 3 | require 'uri' 4 | 5 | module Netsuite 6 | 7 | class Client 8 | 9 | DEFAULT_SCRIPT_ID = 13 10 | DEFAULT_DEPLOY_ID = 1 11 | DEFAULT_GET_RECORD_BATCH_SIZE = 10000 12 | DEFAULT_SEARCH_BATCH_SIZE = 1000 13 | DEFAULT_RETRY_LIMIT = 5 14 | DEFAULT_REQUEST_TIMEOUT = -1 15 | DEFAULT_UPSERT_BATCH_SIZE = 40 16 | DEFAULT_DELETE_BATCH_SIZE = 60 17 | DEFAULT_TRANSFORM_BATCH_SIZE = 10 18 | 19 | attr_accessor :headers, :request_timeout, :rest_script_id, 20 | :search_script_id, :rest_deploy_id, :search_deploy_id 21 | 22 | def initialize(account_id, login, password, role_id, options={}) 23 | super() 24 | 25 | auth_string = "NLAuth nlauth_account=#{account_id}," + 26 | "nlauth_email=#{URI.escape(login, Regexp.new("[^#{URI::PATTERN::UNRESERVED}]"))}," + 27 | "nlauth_signature=#{password}," + 28 | "nlauth_role=#{role_id}" 29 | 30 | @headers = { :authorization => auth_string, 31 | :content_type => "application/json" } 32 | 33 | @cookies = { "NS_VER" => "2011.2.0" } 34 | 35 | @timeout = options[:timeout] || DEFAULT_REQUEST_TIMEOUT 36 | 37 | @script_id = options[:rest_script_id] || DEFAULT_SCRIPT_ID 38 | @deploy_id = options[:rest_deploy_id] || DEFAULT_DEPLOY_ID 39 | 40 | @get_record_batch_size = options[:get_record_batch_size] || DEFAULT_GET_RECORD_BATCH_SIZE 41 | @search_batch_size = options[:search_batch_size] || DEFAULT_SEARCH_BATCH_SIZE 42 | 43 | @retry_limit = options[:retry_limit] || DEFAULT_RETRY_LIMIT 44 | 45 | @sandbox = options[:sandbox] 46 | end 47 | 48 | def initialize_record(record_type) 49 | params = { 'script' => @script_id, 50 | 'deploy' => @deploy_id, 51 | 'operation' => 'CREATE', 52 | 'record_type' => record_type } 53 | 54 | parse_json_result_from_rest(:get, params) 55 | end 56 | 57 | def get_record(record_type, internal_id_list, options={}) 58 | internal_id_list = Array(internal_id_list).uniq 59 | 60 | params = { 'script' => @script_id, 61 | 'deploy' => @deploy_id } 62 | 63 | payload = { 'operation' => 'LOAD', 64 | 'record_type' => record_type } 65 | 66 | results = [] 67 | batch_size = options[:get_record_batch_size] || @get_record_batch_size 68 | 69 | internal_id_list.each_slice(batch_size) do |id_chunk| 70 | payload['internal_id_list'] = id_chunk 71 | rc = parse_json_result_from_rest(:post, params, :payload=>payload) 72 | results += [rc].flatten 73 | puts "Fetched #{results.count} records so far..." 
if options[:verbose] 74 | end 75 | 76 | results = results.first if results.length == 1 && !options[:return_array_on_single] 77 | results 78 | end 79 | 80 | def search_records(record_type, search_filters, return_columns, options={}) 81 | results = Array.new 82 | params = { 'script' => @script_id, 83 | 'deploy' => @deploy_id } 84 | 85 | payload = { 'operation' => 'SEARCH', 86 | 'record_type' => record_type, 87 | 'start_id' => options.fetch(:start_id, 0), 88 | 'search_filters' => search_filters, 89 | 'return_columns' => return_columns } 90 | 91 | batch_size = options[:search_batch_size] || @search_batch_size 92 | if batch_size.to_i % 1000 == 0 93 | payload['batch_size'] = batch_size 94 | else 95 | warn "Batch size is not a multiple of 1000, defaulting to #{DEFAULT_SEARCH_BATCH_SIZE}!" 96 | payload['batch_size'] = DEFAULT_SEARCH_BATCH_SIZE 97 | end 98 | 99 | begin 100 | results_segment, payload['start_id'] = *parse_json_result_from_rest(:post, params, :payload=>payload) 101 | results += results_segment unless results_segment.empty? 102 | puts "Fetched #{results.count} records so far, querying from #{payload['start_id']}..." if options[:verbose] 103 | end while (results_segment.length == payload['batch_size'].to_i) && !options[:exit_after_first_batch] 104 | 105 | results 106 | end 107 | 108 | def upsert(record_type, record_data, options={}) 109 | params = { 'script' => @script_id, 110 | 'deploy' => @deploy_id } 111 | results = Array.new 112 | 113 | record_data.each_slice(options[:batch_size] || DEFAULT_UPSERT_BATCH_SIZE) do |record_data_chunk| 114 | payload = { 'operation' => 'UPSERT', 115 | 'record_type' => record_type, 116 | 'record_data' => record_data_chunk, 117 | 'do_sourcing' => options[:do_sourcing] || true, 118 | 'ignore_mandatory' => options[:ignore_mandatory] || false } 119 | 120 | results += parse_json_result_from_rest(:post, params, :payload=>payload) 121 | end 122 | 123 | results 124 | end 125 | 126 | def delete(record_type, internal_ids, options={}) 127 | params = { 'script' => @script_id, 128 | 'deploy' => @deploy_id } 129 | results = Array.new 130 | 131 | internal_ids = internal_ids.map { |id| id.to_s } 132 | 133 | internal_ids.each_slice(options[:batch_size] || DEFAULT_DELETE_BATCH_SIZE) do |internal_ids_chunk| 134 | payload = { 'operation' => 'DELETE', 135 | 'record_type' => record_type, 136 | 'internal_ids' => internal_ids_chunk } 137 | 138 | results += parse_json_result_from_rest(:post, params, :payload=>payload) 139 | end 140 | 141 | results 142 | end 143 | 144 | def transform(initial_record_type, result_record_type, internal_id, field_changes, sublist_changes, options={}) 145 | results = Array.new 146 | params = { 'script' => @script_id, 147 | 'deploy' => @deploy_id } 148 | 149 | payload = { 'operation' => 'TRANSFORM', 150 | 'initial_record_type' => initial_record_type, 151 | 'result_record_type' => result_record_type, 152 | 'internal_id' => internal_id, 153 | 'field_changes' => field_changes, 154 | 'sublist_changes' => sublist_changes } 155 | 156 | parse_json_result_from_rest(:post, params, :payload=>payload) 157 | end 158 | 159 | def get_saved_search(record_type, search_id, options={}) 160 | results = Array.new 161 | params = { 'script' => @script_id, 162 | 'deploy' => @deploy_id, 163 | 'operation' => 'SAVED', 164 | 'record_type' => record_type, 165 | 'search_id' => search_id, 166 | 'start_id' => options.fetch(:start_id, 0) } 167 | 168 | batch_size = options[:search_batch_size] || @search_batch_size 169 | if batch_size.to_i % 1000 == 0 170 | params['batch_size'] = 
batch_size 171 | else 172 | warn "Batch size is not a multiple of 1000, defaulting to #{DEFAULT_SEARCH_BATCH_SIZE}!" 173 | params['batch_size'] = DEFAULT_SEARCH_BATCH_SIZE 174 | end 175 | 176 | begin 177 | results_segment, params['start_id'] = *parse_json_result_from_rest(:get, params) 178 | results_segment.class == Array ? results += results_segment : raise("Search error: #{results_segment}") 179 | puts "Fetched #{results.count} records so far, querying from #{params['start_id']}..." if options[:verbose] 180 | end while (results_segment.length == params['batch_size'].to_i) && !options[:exit_after_first_batch] 181 | 182 | results 183 | end 184 | 185 | def get_large_saved_search(record_type, search_id, options={}) 186 | results = Array.new 187 | params = { 'script' => @script_id, 188 | 'deploy' => @deploy_id, 189 | 'operation' => 'LSAVED', 190 | 'record_type' => record_type, 191 | 'search_id' => search_id } 192 | 193 | results_segment = *parse_json_result_from_rest(:get, params) 194 | results_segment.class == Array ? results += results_segment : raise("Search error: #{results_segment}") 195 | 196 | results 197 | end 198 | 199 | def parse_json_result_from_rest(method, params, options={}) 200 | rest_params = { :method => method, 201 | :url => create_url(params), 202 | :headers => @headers, 203 | :cookies => @cookies, 204 | :timeout => @timeout } 205 | 206 | if options[:payload] 207 | rest_params[:payload] = stringify(options[:payload]).to_json 208 | rest_params[:content_type] = :json 209 | rest_params[:accept] = :json 210 | end 211 | 212 | reply = nil 213 | retryable(@retry_limit, Exception) do 214 | reply = RestClient::Request.execute(rest_params) { |response, request, result, &block| 215 | case response.code 216 | when 200 217 | response 218 | else 219 | raise "Error with Netsuite response: #{response}" 220 | end 221 | } 222 | end 223 | 224 | begin 225 | parsed = JSON.parse(reply, :symbolize_names=>true) 226 | rescue Exception => e 227 | raise "Unable to parse reply from Netsuite: #{reply}" 228 | end 229 | 230 | if !parsed.first || parsed.flatten.include?("UNEXPECTED_ERROR") 231 | raise "Error processing request: #{parsed.last.to_s}" 232 | else 233 | parsed.last 234 | end 235 | end 236 | 237 | def stringify(data) 238 | if data.class == Array 239 | data.map { |value_item| stringify(value_item) } 240 | elsif data.class == Hash 241 | data.inject({}) do |hash, (key, value)| 242 | hash[key.to_s] = stringify(value) 243 | hash 244 | end 245 | else 246 | data.to_s 247 | end 248 | end 249 | 250 | def create_url(params) 251 | base_url + '?' + params.map { |key, value| "#{key}=#{value}" }.join('&') 252 | end 253 | 254 | def retryable(tries, exception, &block) 255 | begin 256 | return yield 257 | rescue *exception 258 | retry if (tries -= 1) > 0 259 | end 260 | 261 | yield 262 | end 263 | 264 | def base_url 265 | if @sandbox 266 | "https://rest.sandbox.netsuite.com/app/site/hosting/restlet.nl" 267 | else 268 | "https://rest.netsuite.com/app/site/hosting/restlet.nl" 269 | end 270 | end 271 | 272 | end 273 | 274 | end 275 | -------------------------------------------------------------------------------- /lib/restlets/rest.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This RESTlet covers the basic operations for interacting with Netsuite data. 
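 *
 * For orientation, an illustrative sketch only (drawn from lib/netsuite-rest-client.rb above,
 * not a formal spec): the Ruby client passes 'script' and 'deploy' in the query string and,
 * for POST operations, sends the remaining arguments as a JSON body, e.g. a LOAD request:
 *
 *   { "operation": "LOAD",
 *     "record_type": "InventoryItem",
 *     "internal_id_list": ["233"] }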
3 | */ 4 | 5 | /* 6 | * Constants 7 | */ 8 | var OPERATIONS = { 'CREATE': { 'function': 'initializeRecord', 9 | 'access': 'GET', 10 | 'baseGovernance': 10 }, 11 | 'LOAD': { 'function': 'loadRecord', 12 | 'access': 'POST', 13 | 'baseGovernance': 10 }, 14 | 'SAVED': { 'function': 'getSavedSearch', 15 | 'access': 'GET', 16 | 'baseGovernance': 10 }, 17 | 'LSAVED': { 'function': 'getLargeSavedSearch', 18 | 'access': 'GET', 19 | 'baseGovernance': 10 }, 20 | 'SEARCH': { 'function': 'searchRecords', 21 | 'access': 'POST', 22 | 'baseGovernance': 10 }, 23 | 'UPSERT': { 'function': 'upsertRecords', 24 | 'access': 'POST', 25 | 'baseGovernance': 20 }, 26 | 'DELETE': { 'function': 'deleteRecords', 27 | 'access': 'POST', 28 | 'baseGovernance': 20 }, 29 | 'TRANSFORM': { 'function': 'transformRecords', 30 | 'access': 'POST', 31 | 'baseGovernance': 20 } } 32 | 33 | /* 34 | * **Utility Functions** 35 | */ 36 | function evalOperation(method, operation, request) { 37 | /* 38 | * Description: Evalutes the function call passed in by the client 39 | * Params: 40 | * request: Request object from the REST client 41 | * 42 | * Return: Passes up the values produced by API wrapper functions 43 | */ 44 | if(method == OPERATIONS[operation]['access']) { 45 | return(eval(OPERATIONS[operation]['function'] + "(request);")); 46 | } 47 | else { 48 | var errorMessage = "The operation [" + operation + "] cannot be accessed via the REST method " + 49 | "requested. Methods allowed: [" + OPERATIONS[operation]['access'] + "]"; 50 | throw new Error(errorMessage); 51 | } 52 | } 53 | 54 | function performSearch(recordType, batchSize, lowerBound, rawFilters, rawColumns) { 55 | /* 56 | * Description: Runs a search based on the given field->value criteria 57 | * Params: 58 | * recordType: The type of record to be covered by the search 59 | * batchSize: Size of the batch to be returned upon completion 60 | * lowerBound: Id to determine the lower bound of the batch, results 61 | * returned will all have ids greater than the value given 62 | * rawFilters: Hash of fields with the values to be matched by an included operator 63 | * rawColumns: Hash of the columns with joins names to be returned for each record 64 | * 65 | * Return: A list of results with ids and columns to match the results filter 66 | */ 67 | var searchFilters = [new nlobjSearchFilter('internalidnumber', null, 'greaterthan', lowerBound)]; 68 | var returnColumns = [new nlobjSearchColumn('internalid', null).setSort()]; 69 | var accumulatedResults = []; 70 | 71 | for(var filter in rawFilters) { 72 | searchFilters[searchFilters.length] = new nlobjSearchFilter(filter, null, 73 | rawFilters[filter]['operator'], 74 | rawFilters[filter]['value']); 75 | if(rawFilters[filter].hasOwnProperty('formula')) { 76 | fieldName = rawFilters[filter]['formula']['field'] 77 | valueList = rawFilters[filter]['formula']['values'] 78 | comparison = rawFilters[filter]['formula']['comparison'] 79 | joinOperator = rawFilters[filter]['formula']['join_operator'] 80 | conditions = []; 81 | 82 | formulaString = "CASE WHEN ("; 83 | for(index in valueList) { 84 | conditions.push("{" + fieldName + "} " + comparison + " '" + valueList[index] + "'"); 85 | } 86 | formulaString += conditions.join(' ' + joinOperator + ' '); 87 | formulaString += ") THEN 1 ELSE 0 END" 88 | 89 | // return([[formulaString], 0]); 90 | 91 | searchFilters[searchFilters.length-1].setFormula(formulaString); 92 | } 93 | } 94 | 95 | for(var column in rawColumns) { 96 | returnColumns[returnColumns.length] = new nlobjSearchColumn(column, 97 | 
rawColumns[column]);
98 | }
99 |
100 | do {
101 | var tempItems = nlapiSearchRecord(recordType, null, searchFilters, returnColumns);
102 | if(tempItems) {
103 | lowerBound = tempItems[tempItems.length - 1].getId();
104 | accumulatedResults = accumulatedResults.concat(tempItems);
105 | }
106 | } while(tempItems && tempItems.length == 1000 && accumulatedResults.length < batchSize);
107 |
108 | return([accumulatedResults, lowerBound]);
109 | }
110 |
111 | function populateLineItems(record, lineItemHash) {
112 | for(var lineItemFieldName in lineItemHash) {
113 | for(var index = 0; index < lineItemHash[lineItemFieldName].length; index++) {
114 | var lineItemIndex = record.getLineItemCount(lineItemFieldName) + 1;
115 | record.insertLineItem(lineItemFieldName, lineItemIndex);
116 | for(lineItemField in lineItemHash[lineItemFieldName][index]) {
117 | record.setLineItemValue(lineItemFieldName, lineItemField, index+1,
118 | lineItemHash[lineItemFieldName][index][lineItemField]);
119 | }
120 | }
121 | }
122 | }
123 |
124 | function governanceCheck(operation, iterations) {
125 | /*
126 | * Description: Determines if a given execution of this method will exceed the governance limit
127 | * Params:
128 | * operation: String naming the requested operation (a key of OPERATIONS)
129 | * iterations: Integer count of the number of iterations of governed nlapi calls
130 | * the execution will make
131 | *
132 | * Return: True if under the limit, false if not
133 | */
134 | var governanceLimit = nlapiGetContext().getRemainingUsage();
135 |
136 | if(OPERATIONS[operation]['baseGovernance']*iterations > governanceLimit) {
137 | return(false);
138 | }
139 | return(true);
140 | }
141 |
142 | function formatException(exception) {
143 | /*
144 | * Description: Format an exception to send to the client
145 | * Params:
146 | * exception: An exception object
147 | *
148 | * Return: A serialized exception object
149 | */
150 | var serializedException = [exception.name.toString(), exception.message];
151 |
152 | try {
153 | return(serializedException.concat(exception.getStackTrace()));
154 | }
155 | catch(stack_fetch_error) {
156 | return(serializedException.concat([stack_fetch_error.message]));
157 | }
158 | }
159 |
160 | /*
161 | * Netsuite API Call Wrapper Functions
162 | */
163 | function initializeRecord(request) {
164 | /*
165 | * Description: Retrieves an initialized object with the given parameters
166 | * Params:
167 | * request.record_type: String matching a record type
168 | *
169 | * Return: An instantiated object of the given type
170 | */
171 | var recordType = request.record_type;
172 |
173 | return(nlapiCreateRecord(recordType));
174 | }
175 |
176 | function loadRecord(request) {
177 | /*
178 | * Description: Loads one or more records matching the given internal ids
179 | * Params:
180 | * request['record_type']: String matching a record type
181 | * request['internal_id_list']: Array of Strings matching the internal ids of multiple records
182 | *
183 | * Return: List of loaded records of the given type (or formatted exceptions for ids that fail to load)
184 | */
185 | var recordType = request['record_type'];
186 | var internalIdList = request['internal_id_list'];
187 | var recordList = [];
188 |
189 | for(var index = 0; index < internalIdList.length; index++) {
190 | recordId = internalIdList[index];
191 |
192 | try {
193 | recordList.push(nlapiLoadRecord(recordType, recordId));
194 | }
195 | catch(load_exception) {
196 | recordList.push(formatException(load_exception));
197 | }
198 | }
199 |
200 | return(recordList);
201 | }
202 |
203 | function searchRecords(request) {
204 | /*
205 | * Description: Runs a search
based on the given field->value criteria 206 | * Params: 207 | * request['record_type']: The type of record to be covered by the search 208 | * request['search_filters']: List of fields with the values to be matched 209 | * request['return_columns']: List of the columns names to be returned for each record 210 | * request['batch_size']: Size of the batch to be returned upon completion 211 | * request['start_id']: Id to determine the lower bound of the batch, results 212 | * returned will all have ids greater than the value given 213 | * 214 | * Return: A list of results with ids and columns to match the results filter 215 | */ 216 | var recordType = request['record_type']; 217 | var batchSize = request['batch_size']; 218 | var lowerBound = request['start_id']; 219 | var rawFilters = request['search_filters']; 220 | var rawColumns = request['return_columns']; 221 | 222 | return(performSearch(recordType, batchSize, lowerBound, rawFilters, rawColumns)); 223 | } 224 | 225 | function upsertRecords(request) { 226 | /* 227 | * Description: Updates a record with given field values, can ignore validations if requested 228 | * Params: 229 | * request['record_type']: String matching a valid record type 230 | * request['record_data']: Raw Record data 231 | * request['update_only']: Boolean value that, if true, only allows updates to occur, 232 | * no new records will be created 233 | * request['do_sourcing']: Boolean value to set sourcing mode 234 | * request['ignore_mandatory']: Boolean value to set ignoring of validations for mandatory fields 235 | * 236 | * Return: Internal ids of the comitted records and errors for uncommitted records 237 | */ 238 | var recordType = request['record_type']; 239 | var recordData = request['record_data']; 240 | var doSourcing = request['do_sourcing']; 241 | var ignoreMandatory = request['ignore_mandatory']; 242 | var writeResults = []; 243 | 244 | for(var index = 0; index < recordData.length; index++) { 245 | attributes = recordData[index]; 246 | record = null; 247 | 248 | try { 249 | if(attributes['id'] != undefined) { 250 | record = nlapiLoadRecord(recordType, attributes['id']); 251 | } else { 252 | record = nlapiCreateRecord(recordType); 253 | } 254 | for(var field in attributes) { 255 | record.setFieldValue(field, attributes[field]); 256 | if(field=='sublist_fields') { populateLineItems(record, attributes[field]); } 257 | } 258 | writeResults = writeResults.concat([[nlapiSubmitRecord(record, doSourcing, ignoreMandatory), attributes]]); 259 | } 260 | catch(write_exception) { 261 | writeResults = writeResults.concat([[formatException(write_exception), attributes]]); 262 | } 263 | } 264 | 265 | return(writeResults); 266 | } 267 | 268 | function deleteRecords(request) { 269 | /* 270 | * Description: Deletes a record of given type with the given ids 271 | * Params: 272 | * request['record_type']: String matching a record type 273 | * request['internal_ids']: Array of record ids 274 | * 275 | * Return: An array of ids pairs with false, if no exception, and a formatted exception 276 | * in the event of an error with deletion 277 | */ 278 | var recordType = request['record_type']; 279 | var internalIds = request['internal_ids']; 280 | var results = []; 281 | 282 | for(var index = 0; index < internalIds.length; index++) { 283 | itemId = internalIds[index]; 284 | 285 | try { 286 | nlapiDeleteRecord(recordType, itemId); 287 | results = results.concat([itemId, false]); 288 | } 289 | catch(exception) { 290 | results = results.concat([itemId, formatException(exception)]); 291 
| }
292 | }
293 |
294 | return(results);
295 | }
296 |
297 | function transformRecords(request) {
298 | /*
299 | * Description: Transforms records from their original, given type to the requested type.
300 | *
301 | * Params:
302 | * request['initial_record_type']: String of the initial record type to lookup
303 | * request['result_record_type']: String of the record type post-transformation
304 | * request['internal_id']: Internal ID of the record to be transformed
305 | * request['field_changes']: A hash of field names as keys with their values
306 | * request['sublist_changes']: A hash of sublist names with assigned arrays of hashes;
307 | * hashes within the list correspond by internal id to
308 | * referenced line items. Line items referenced will be
309 | * altered according to field values given, unreferenced
310 | * items will be removed from the list in the transformed
311 | * record.
312 | *
313 | * Return: Array of original record ids paired with the internal id of the transformed
314 | * record or an error for a failed transformation
315 | */
316 | var initialRecordType = request['initial_record_type'];
317 | var resultRecordType = request['result_record_type'];
318 | var internalId = request['internal_id'];
319 | var fieldChanges = request['field_changes'];
320 | var sublistChanges = request['sublist_changes'];
321 |
322 | newRecord = nlapiTransformRecord(initialRecordType, internalId, resultRecordType);
323 |
324 | // Alter field values on transformed record
325 | for(var field in fieldChanges) { newRecord.setFieldValue(field, fieldChanges[field]); }
326 |
327 | for(var sublistName in sublistChanges) {
328 | matchField = sublistChanges[sublistName]['match_field'];
329 | sublistItems = sublistChanges[sublistName]['line_items'];
330 | indexesToDelete = [];
331 |
332 | // Alter line item values to match hash values, remove items that are not referenced
333 | for(var lineItemIndex = 1; lineItemIndex < (newRecord.getLineItemCount(sublistName) + 1); lineItemIndex++) {
334 | var found = false; // reset the match flag per line item so unreferenced items are removed below
335 | for(var sublistItemDataIndex = 0; sublistItemDataIndex < sublistItems.length; sublistItemDataIndex++) {
336 | sublistItemData = sublistItems[sublistItemDataIndex];
337 |
338 | if(newRecord.getLineItemValue(sublistName, matchField, lineItemIndex) == sublistItemData[matchField]) {
339 | found = true;
340 |
341 | for(var sublistItemField in sublistItemData) {
342 | if(sublistItemField == matchField) { continue; }
343 | newRecord.setLineItemValue(sublistName, sublistItemField, lineItemIndex,
344 | sublistItemData[sublistItemField]);
345 | }
346 | }
347 | }
348 | if(!found) { indexesToDelete = indexesToDelete.concat([lineItemIndex]); }
349 | }
350 |
351 | deletionCount = 0;
352 | for(var index = 0; index < indexesToDelete.length; index++) {
353 | deletionIndex = indexesToDelete[index];
354 | newRecord.removeLineItem(sublistName, (deletionIndex - deletionCount));
355 | deletionCount++;
356 | }
357 | }
358 |
359 | return([nlapiSubmitRecord(newRecord, false, false), [internalId, fieldChanges, sublistChanges]]);
360 | }
361 |
362 | function getSavedSearch(request) {
363 | /*
364 | * Description: Retrieves results from a given saved search of the defined batch size rounded up to the next
365 | * one thousand records
366 | * Params:
367 | * request.search_id: Id of the saved search to run
368 | * request.record_type: String of the record type to fetch
369 | * request.batch_size: Size of the batch to be returned upon completion
370 | * request.start_id: Id to determine the lower bound of the batch, results
371 | *
returned will all have ids greater than the value given 372 | * 373 | * Return: List of result rows with internal ids from the given start_id up through a count of the given 374 | * batch size or next highest multiple of one thousand from the given batch size if the given size 375 | * is not a multiple of one thousand 376 | */ 377 | var searchId = request.search_id; 378 | var recordType = request.record_type; 379 | var batchSize = request.batch_size; 380 | var lowerBound = request.start_id; 381 | var accumulatedResults = []; 382 | var searchFilters = [new nlobjSearchFilter('internalidnumber', null, 'greaterthan', lowerBound)]; 383 | var returnColumns = [new nlobjSearchColumn('internalid', null).setSort()]; 384 | 385 | do { 386 | var tempItems = nlapiSearchRecord(recordType, searchId, searchFilters, returnColumns); 387 | if(tempItems) { 388 | lowerBound = tempItems[tempItems.length - 1].getId(); 389 | searchFilters = [new nlobjSearchFilter('internalidnumber', null, 'greaterthan', lowerBound)]; 390 | accumulatedResults = accumulatedResults.concat(tempItems); 391 | } 392 | } while(tempItems && tempItems.length == 1000 && accumulatedResults.length < batchSize); 393 | 394 | return([accumulatedResults, lowerBound]); 395 | } 396 | 397 | function getLargeSavedSearch(request) { 398 | /* 399 | * Description: Retrieves results from a given saved search of the defined batch size rounded up to the next 400 | * one thousand records 401 | * Params: 402 | * request.search_id: Id of the saved search to run 403 | * request.record_type: String of the record type to fetch 404 | * 405 | * Return: List of result rows with internal ids from the given start_id up through a count of the given 406 | * batch size or next highest multiple of one thousand from the given batch size if the given size 407 | * is not a multiple of one thousand 408 | */ 409 | var searchId = request.search_id; 410 | var recordType = request.record_type; 411 | var accumulatedResults = []; 412 | 413 | var savedSearch = nlapiLoadSearch(recordType, searchId); 414 | var resultSet = savedSearch.runSearch(); 415 | var searchIndex = 0; 416 | 417 | do { 418 | var resultSlice = resultSet.getResults(searchIndex, searchIndex+1000); 419 | for (var rs in resultSlice) { 420 | accumulatedResults.push(resultSlice[rs]); 421 | searchIndex++; 422 | } 423 | } while(resultSlice.length >= 1000); 424 | 425 | return(accumulatedResults); 426 | } 427 | 428 | /* 429 | * Handler Functions 430 | */ 431 | function getHandler(request) { 432 | /* 433 | * Description: Method to handle requests over GET 434 | * Params: 435 | * request: Request object from the REST client 436 | * 437 | * Return: JSON response 438 | */ 439 | try { 440 | return([true].concat([evalOperation('GET', request.operation, request)])); 441 | } 442 | catch(exception) { 443 | return([false].concat([formatException(exception)])); 444 | } 445 | } 446 | 447 | function postHandler(request) { 448 | /* 449 | * Description: Method to handle requests over POST 450 | * Params: 451 | * request: Request object from the REST client 452 | * 453 | * Return: JSON response 454 | */ 455 | try { 456 | return([true].concat([evalOperation('POST', request['operation'], request)])); 457 | } 458 | catch(exception) { 459 | return([false].concat([formatException(exception)])); 460 | } 461 | } 462 | --------------------------------------------------------------------------------
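
Illustrative usage (a sketch assembled from the README and spec above; credentials come from
environment variables and the record id '233' and saved-search id '678' are placeholders):

  require 'netsuite-rest-client'

  nsc = Netsuite::Client.new(ENV['NETSUITE_ACCOUNT_ID'],
                             ENV['NETSUITE_LOGIN'],
                             ENV['NETSUITE_PASSWORD'],
                             ENV['NETSUITE_ROLE_ID'],
                             :sandbox => true)

  item = nsc.get_record('InventoryItem', '233')
  rows = nsc.get_saved_search('InventoryItem', '678', :verbose => true)
  puts "fetched #{rows.count} rows"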