Commit

Increase max size of CSV to system's max int (#53)
* Increase max size of CSV to system's max int

* Pylint fixes

* Fix syntax error

* More pylint

* Pylint
dmosorast authored Sep 17, 2018
1 parent 0faaba3 commit f1ed9aa
Showing 6 changed files with 14 additions and 15 deletions.
2 changes: 1 addition & 1 deletion .circleci/config.yml
@@ -12,7 +12,7 @@ jobs:
           source ~/.virtualenvs/tap-salesforce/bin/activate
           pip install .
           pip install pylint
-          pylint tap_salesforce -d missing-docstring,invalid-name,line-too-long,too-many-locals,too-few-public-methods,fixme,stop-iteration-return
+          pylint tap_salesforce -d missing-docstring,invalid-name,line-too-long,too-many-locals,too-few-public-methods,fixme,stop-iteration-return,no-else-return
       - run:
           name: 'Unit Tests'
           command: |

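A note on the new pylint disable: no-else-return (R1705) flags an else branch that immediately follows a return, since the else is redundant. A minimal illustration of the pattern the check objects to; the function below is hypothetical, not from this repo:

def classify(n):
    if n > 0:
        return "positive"
    else:  # pylint R1705 (no-else-return): the else after a return is redundant
        return "non-positive"

# The form pylint suggests instead:
def classify_fixed(n):
    if n > 0:
        return "positive"
    return "non-positive"
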
10 changes: 4 additions & 6 deletions tap_salesforce/__init__.py
@@ -2,9 +2,8 @@
 import json
 import sys
 import singer
-import singer.metrics as metrics
 import singer.utils as singer_utils
-from singer import metadata
+from singer import metadata, metrics
 
 import tap_salesforce.salesforce
 from tap_salesforce.sync import (sync_stream, resume_syncing_bulk_query, get_stream_version)
@@ -102,8 +101,7 @@ def do_discover(sf):
     """Describes a Salesforce instance's objects and generates a JSON schema for each field."""
     global_description = sf.describe()
 
-    objects_to_discover = set([o['name']
-                               for o in global_description['sobjects']])
+    objects_to_discover = {o['name'] for o in global_description['sobjects']}
     key_properties = ['Id']
 
     sf_custom_setting_objects = []
@@ -177,7 +175,7 @@ def do_discover(sf):
 
     # There are cases where compound fields are referenced by the associated
     # subfields but are not actually present in the field list
-    field_name_set = set([f['name'] for f in fields])
+    field_name_set = {f['name'] for f in fields}
     filtered_unsupported_fields = [f for f in unsupported_fields if f[0] in field_name_set]
     missing_unsupported_field_names = [f[0] for f in unsupported_fields if f[0] not in field_name_set]
 
@@ -245,7 +243,7 @@ def do_discover(sf):
     unsupported_tag_objects = [object_to_tag_references[f]
                                for f in sf_custom_setting_objects if f in object_to_tag_references]
     if unsupported_tag_objects:
-        LOGGER.info(
+        LOGGER.info( #pylint:disable=logging-not-lazy
             "Skipping the following Tag objects, Tags on Custom Settings Salesforce objects " +
             "are not supported by the Bulk API:")
         LOGGER.info(unsupported_tag_objects)

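The #pylint:disable=logging-not-lazy comment silences pylint's warning about building a log message eagerly (here with + concatenation) instead of passing arguments for the logging call to interpolate lazily. A small sketch of the two styles, with a made-up logger and object names:

import logging

LOGGER = logging.getLogger("example")

skipped = ["TagA__c", "TagB__c"]

# Flagged by logging-not-lazy: the message string is assembled even if INFO is disabled.
LOGGER.info("Skipping the following Tag objects: " + str(skipped))

# Lazy form pylint prefers: formatting only happens if the record is actually emitted.
LOGGER.info("Skipping the following Tag objects: %s", skipped)
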
5 changes: 2 additions & 3 deletions tap_salesforce/salesforce/__init__.py
@@ -5,9 +5,8 @@
 import requests
 from requests.exceptions import RequestException
 import singer
-import singer.metrics as metrics
 import singer.utils as singer_utils
-from singer import metadata
+from singer import metadata, metrics
 
 from tap_salesforce.salesforce.bulk import Bulk
 from tap_salesforce.salesforce.rest import Rest
@@ -185,7 +184,7 @@ def field_to_property_schema(field, mdata):
 
     return property_schema, mdata
 
-class Salesforce(object):
+class Salesforce():
     # pylint: disable=too-many-instance-attributes,too-many-arguments
     def __init__(self,
                  refresh_token=None,

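The class Salesforce(object) to class Salesforce() change (repeated for Bulk and Rest below) is cosmetic under Python 3, where every class already inherits from object; it is presumably what pylint's useless-object-inheritance check complained about. The two spellings are equivalent, as this throwaway example shows (class names are hypothetical):

class WithExplicitBase(object):  # Python 2 style; the explicit base is redundant on Python 3
    pass

class WithImplicitBase:          # Python 3: still a subclass of object
    pass

assert issubclass(WithExplicitBase, object)
assert issubclass(WithImplicitBase, object)
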
7 changes: 5 additions & 2 deletions tap_salesforce/salesforce/bulk.py
@@ -1,10 +1,11 @@
 # pylint: disable=protected-access
 import csv
 import json
+import sys
 import time
 import tempfile
 import singer
-import singer.metrics as metrics
+from singer import metrics
 
 import xmltodict
 
@@ -35,11 +36,13 @@ def find_parent(stream):
     return parent_stream
 
 
-class Bulk(object):
+class Bulk():
 
     bulk_url = "{}/services/async/41.0/{}"
 
     def __init__(self, sf):
+        # Set csv max reading size to the platform's max size available.
+        csv.field_size_limit(sys.maxsize)
         self.sf = sf
 
     def query(self, catalog_entry, state):

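This hunk is the change the commit title refers to. Python's csv module refuses to parse any single field longer than its default limit of 131072 characters, and long Salesforce text fields in Bulk API CSV results can exceed that; raising the limit to sys.maxsize in Bulk's constructor lifts the cap for the whole process. A self-contained sketch of the failure and the fix, using made-up data:

import csv
import io
import sys

big_field = "x" * 200000  # longer than the default 131072-character field limit
data = io.StringIO("Id,Description\n1,%s\n" % big_field)

try:
    list(csv.reader(data))  # raises csv.Error: field larger than field limit (131072)
except csv.Error as err:
    print("default limit hit:", err)

csv.field_size_limit(sys.maxsize)  # same call Bulk.__init__ now makes
data.seek(0)
rows = list(csv.reader(data))
print(len(rows[1][1]))  # 200000 -- the oversized field now parses

One caveat: on builds where the C long is 32-bit (notably Windows), csv.field_size_limit(sys.maxsize) can raise OverflowError, which is why some projects probe downward from sys.maxsize until the call succeeds.
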
2 changes: 1 addition & 1 deletion tap_salesforce/salesforce/rest.py
@@ -8,7 +8,7 @@
 
 MAX_RETRIES = 4
 
-class Rest(object):
+class Rest():
 
     def __init__(self, sf):
         self.sf = sf

3 changes: 1 addition & 2 deletions tap_salesforce/sync.py
@@ -1,8 +1,7 @@
 import time
 import singer
-import singer.metrics as metrics
 import singer.utils as singer_utils
-from singer import Transformer, metadata
+from singer import Transformer, metadata, metrics
 from requests.exceptions import RequestException
 from tap_salesforce.salesforce.bulk import Bulk
 
