Skip to content
This repository has been archived by the owner on Aug 14, 2024. It is now read-only.

Commit

Permalink
fix passing region when constructing resource graph
Browse files Browse the repository at this point in the history
  • Loading branch information
whummer committed Apr 16, 2021
1 parent c238c73 commit ae91eef
Show file tree
Hide file tree
Showing 2 changed files with 39 additions and 37 deletions.
73 changes: 37 additions & 36 deletions localstack/dashboard/infra.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,16 +24,20 @@
LOG = logging.getLogger(__name__)


def get_kinesis_streams(filter='.*', pool={}, env=None):
def _connect(service, env=None, region=None):
    """Return a client for *service*, connected to the given *region*.

    NOTE(review): callers pass ``env``, but it is not forwarded to
    ``aws_stack.connect_to_service`` here -- confirm whether the environment
    should also be propagated, or whether only the region is relevant.
    """
    return aws_stack.connect_to_service(service, region_name=region)


def get_kinesis_streams(filter='.*', pool={}, env=None, region=None):
if MOCK_OBJ:
return []
result = []
try:
kinesis_client = aws_stack.connect_to_service('kinesis')
kinesis_client = _connect('kinesis', region=region)
out = kinesis_client.list_streams()
for name in out['StreamNames']:
if re.match(filter, name):
details = kinesis_client.describe_stream(StreamArn=name)
details = kinesis_client.describe_stream(StreamName=name)
arn = details['StreamDescription']['StreamARN']
stream = KinesisStream(arn)
pool[arn] = stream
Expand All @@ -44,10 +48,10 @@ def get_kinesis_streams(filter='.*', pool={}, env=None):
return result


def get_kinesis_shards(stream_name=None, stream_details=None, env=None):
def get_kinesis_shards(stream_name=None, stream_details=None, env=None, region=None):
if not stream_details:
kinesis_client = aws_stack.connect_to_service('kinesis')
out = kinesis_client.describe_stream(StreamArn=stream_name)
kinesis_client = _connect('kinesis', env=env, region=region)
out = kinesis_client.describe_stream(StreamName=stream_name)
shards = out['StreamDescription']['Shards']
result = []
for s in shards:
Expand All @@ -58,13 +62,11 @@ def get_kinesis_shards(stream_name=None, stream_details=None, env=None):
return result


def get_sqs_queues(filter='.*', pool={}, env=None):
def get_sqs_queues(filter='.*', pool={}, env=None, region=None):
result = []
try:
sqs_client = aws_stack.connect_to_service('sqs')
sqs_client = _connect('sqs', env=env, region=region)
out = sqs_client.list_queues()
if not out.strip():
return result
queues = out['QueueUrls']
for q in queues:
name = q.split('/')[-1]
Expand All @@ -77,7 +79,7 @@ def get_sqs_queues(filter='.*', pool={}, env=None):
return result


def get_lambda_functions(filter='.*', details=False, pool={}, env=None):
def get_lambda_functions(filter='.*', details=False, pool={}, env=None, region=None):
if MOCK_OBJ:
return []

Expand All @@ -102,19 +104,19 @@ def handle(func):
LOG.warning("Unable to get code for lambda '%s'" % func_name)

try:
lambda_client = aws_stack.connect_to_service('lambda')
lambda_client = _connect('lambda', env=env, region=region)
out = lambda_client.list_functions()
parallelize(handle, out['Functions'])
except Exception:
pass
return result


def get_lambda_event_sources(func_name=None, env=None):
def get_lambda_event_sources(func_name=None, env=None, region=None):
if MOCK_OBJ:
return {}

lambda_client = aws_stack.connect_to_service('lambda')
lambda_client = _connect('lambda', env=env, region=region)
if func_name:
out = lambda_client.list_event_source_mappings(FunctionName=func_name)
else:
Expand All @@ -123,13 +125,13 @@ def get_lambda_event_sources(func_name=None, env=None):
return result


def get_lambda_code(func_name, retries=1, cache_time=None, env=None):
def get_lambda_code(func_name, retries=1, cache_time=None, env=None, region=None):
if MOCK_OBJ:
return ''
env = aws_stack.get_environment(env)
if cache_time is None and not aws_stack.is_local_env(env):
cache_time = AWS_LAMBDA_CODE_CACHE_TIMEOUT
lambda_client = aws_stack.connect_to_service('lambda')
lambda_client = _connect('lambda', env=env, region=region)
out = lambda_client.get_function(FunctionName=func_name)
loc = out['Code']['Location']
hash = md5(loc)
Expand Down Expand Up @@ -171,10 +173,10 @@ def get_lambda_code(func_name, retries=1, cache_time=None, env=None):
return result


def get_elasticsearch_domains(filter='.*', pool={}, env=None):
def get_elasticsearch_domains(filter='.*', pool={}, env=None, region=None):
result = []
try:
es_client = aws_stack.connect_to_service('es')
es_client = _connect('es', env=env, region=region)
out = es_client.list_domain_names()

def handle(domain):
Expand All @@ -194,10 +196,10 @@ def handle(domain):
return result


def get_dynamo_dbs(filter='.*', pool={}, env=None):
def get_dynamo_dbs(filter='.*', pool={}, env=None, region=None):
result = []
try:
dynamodb_client = aws_stack.connect_to_service('dynamodb')
dynamodb_client = _connect('dynamodb', env=env, region=region)
out = dynamodb_client.list_tables()

def handle(table):
Expand All @@ -217,8 +219,9 @@ def handle(table):
return result


def get_s3_buckets(filter='.*', pool={}, details=False, env=None):
def get_s3_buckets(filter='.*', pool={}, details=False, env=None, region=None):
result = []
s3_client = _connect('s3', env=env, region=region)

def handle(bucket):
bucket_name = bucket['Name']
Expand All @@ -229,7 +232,6 @@ def handle(bucket):
pool[arn] = bucket
if details:
try:
s3_client = aws_stack.connect_to_service('s3')
out = s3_client.get_bucket_notification(Bucket=bucket_name)
if out:
if 'CloudFunctionConfiguration' in out:
Expand All @@ -242,23 +244,21 @@ def handle(bucket):
print('WARNING: Unable to get details for bucket: %s' % e)

try:
s3_client = aws_stack.connect_to_service('s3')
out = s3_client.list_buckets()
parallelize(handle, out['Buckets'])
except Exception:
pass
return result


def get_firehose_streams(filter='.*', pool={}, env=None):
def get_firehose_streams(filter='.*', pool={}, env=None, region=None):
result = []
try:
firehose_client = aws_stack.connect_to_service('firehose')
firehose_client = _connect('firehose', env=env, region=region)
out = firehose_client.list_delivery_streams()
for stream_name in out['DeliveryStreamNames']:
if re.match(filter, stream_name):
details = firehose_client.describe_delivery_stream(
DeliveryStreamName=stream_name)
details = firehose_client.describe_delivery_stream(DeliveryStreamName=stream_name)
details = details['DeliveryStreamDescription']
arn = details['DeliveryStreamARN']
s = FirehoseStream(arn)
Expand All @@ -272,8 +272,8 @@ def get_firehose_streams(filter='.*', pool={}, env=None):
return result


def read_kinesis_iterator(shard_iterator, max_results=10, env=None, region=None):
    """Fetch up to *max_results* records from the given Kinesis shard iterator.

    Returns the raw ``get_records`` response of the Kinesis client.
    """
    client = _connect('kinesis', env=env, region=region)
    return client.get_records(ShardIterator=shard_iterator, Limit=max_results)

Expand All @@ -299,16 +299,17 @@ def get_graph(name_filter='.*', env=None, **kwargs):

pool = {}
node_ids = {}
region = kwargs.get('region')

# Make sure we load components in the right order:
# (ES,DynamoDB,S3) -> (Kinesis,Lambda)
domains = get_elasticsearch_domains(name_filter, pool=pool, env=env)
dbs = get_dynamo_dbs(name_filter, pool=pool, env=env)
buckets = get_s3_buckets(name_filter, details=True, pool=pool, env=env)
streams = get_kinesis_streams(name_filter, pool=pool, env=env)
firehoses = get_firehose_streams(name_filter, pool=pool, env=env)
lambdas = get_lambda_functions(name_filter, details=True, pool=pool, env=env)
queues = get_sqs_queues(name_filter, pool=pool, env=env)
domains = get_elasticsearch_domains(name_filter, pool=pool, env=env, region=region)
dbs = get_dynamo_dbs(name_filter, pool=pool, env=env, region=region)
buckets = get_s3_buckets(name_filter, details=True, pool=pool, env=env, region=region)
streams = get_kinesis_streams(name_filter, pool=pool, env=env, region=region)
firehoses = get_firehose_streams(name_filter, pool=pool, env=env, region=region)
lambdas = get_lambda_functions(name_filter, details=True, pool=pool, env=env, region=region)
queues = get_sqs_queues(name_filter, pool=pool, env=env, region=region)

for es in domains:
uid = short_uid()
Expand Down
3 changes: 2 additions & 1 deletion localstack/services/edge.py
Original file line number Diff line number Diff line change
Expand Up @@ -314,7 +314,8 @@ def terminate_all_processes_in_docker():
def serve_resource_graph(data):
    """Build the dashboard resource graph from a JSON request payload.

    The payload may contain ``awsEnvironment``, ``nameFilter`` and
    ``awsRegion`` keys; missing values fall back to sensible defaults
    (empty environment, match-all filter, no region).
    """
    payload = json.loads(to_str(data or '{}'))
    env = Environment.from_string(payload.get('awsEnvironment'))
    name_filter = payload.get('nameFilter') or '.*'
    return dashboard_infra.get_graph(
        name_filter=name_filter, env=env, region=payload.get('awsRegion'))


Expand Down

0 comments on commit ae91eef

Please sign in to comment.