Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add FluxRecord.row with response data stored in Array #118

Merged
merged 11 commits into from
Oct 7, 2022
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
## 2.8.0 [unreleased]

### Features
1. [#118](https://github.com/influxdata/influxdb-client-ruby/pull/118): Added `FluxRecord.row` which stores response data in an array

## 2.7.0 [2022-07-29]

### Features
Expand Down
2 changes: 2 additions & 0 deletions examples/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,5 @@

## Others
- [invokable_scripts.rb](invokable_scripts.rb) - How to use Invokable scripts Cloud API to create custom endpoints that query data
- [record_row_example.rb](record_row_example.rb) - How to use `FluxRecord.row` (Array) instead of `FluxRecord.values` (Hash),
in case of duplicated column names
38 changes: 38 additions & 0 deletions examples/record_row_example.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
require 'influxdb-client'

url = 'http://localhost:8086'
token = 'my-token'
bucket = 'my-bucket'
org = 'my-org'

# Connect to a local InfluxDB 2.x instance; writes use nanosecond precision.
client = InfluxDB2::Client.new(url,
                               token,
                               bucket: bucket,
                               org: org,
                               precision: InfluxDB2::WritePrecision::NANOSECOND,
                               use_ssl: false)

# Prepare data: write five points in line protocol.
write_api = client.create_write_api
1.upto(5) do |measurement_value|
  write_api.write(data: "point,table=my-table result=#{measurement_value}", bucket: bucket, org: org)
end

# Query the points back, pivoted so that fields become columns.
query = "from(bucket: \"#{bucket}\") |> range(start: -1m) |> filter(fn: (r) => (r[\"_measurement\"] == \"point\"))
|> pivot(rowKey:[\"_time\"], columnKey: [\"_field\"], valueColumn: \"_value\")"
result = client.create_query_api.query(query: query)

# Show each record twice: first as a Hash (values), then as an Array (row).
puts '----------------------------------------------- FluxRecord.values ----------------------------------------------'
result[0].records.each do |record|
  puts record.values
end

puts '------------------------------------------------- FluxRecord.row -----------------------------------------------'
result[0].records.each do |record|
  puts record.row.join(',')
end

client.close!
14 changes: 11 additions & 3 deletions lib/influxdb2/client/flux_csv_parser.rb
Original file line number Diff line number Diff line change
Expand Up @@ -125,8 +125,8 @@ def _parse_line(csv)
token = csv[0]

# start new table
if ((ANNOTATIONS.include? token) && !@start_new_table) ||
(@response_mode == InfluxDB2::FluxResponseMode::ONLY_NAMES && @table.nil?)
if ((ANNOTATIONS.include? token) && !@start_new_table) || (@response_mode ==
InfluxDB2::FluxResponseMode::ONLY_NAMES && @table.nil?)

# Return already parsed DataFrame
@start_new_table = true
Expand Down Expand Up @@ -187,6 +187,12 @@ def _add_column_names_and_tags(table, csv)
column.label = csv[i]
i += 1
end

duplicates = table.columns.group_by { :label }.select { |_k, v| v.size > 1 }

warning = "The response contains columns with duplicated names: #{duplicates.keys.join(', ')}
You should use the 'FluxRecord.row to access your data instead of 'FluxRecord.values' hash."
puts warning unless duplicates.empty?
end

def _parse_values(csv)
Expand Down Expand Up @@ -234,7 +240,9 @@ def _parse_record(table_index, table, csv)
table.columns.each do |flux_column|
column_name = flux_column.label
str_val = csv[flux_column.index + 1]
record.values[column_name] = _to_value(str_val, flux_column)
value = _to_value(str_val, flux_column)
record.values[column_name] = value
record.row.push(value)
end

record
Expand Down
9 changes: 7 additions & 2 deletions lib/influxdb2/client/flux_table.rb
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ def initialize
@columns = []
@records = []
end

attr_reader :columns, :records

# A table's group key is subset of the entire columns dataset that assigned to the table.
Expand All @@ -46,11 +47,14 @@ def group_key
class FluxRecord
# @param [Integer] table the index of table which contains the record
# @param [Hash] values tuple of values
def initialize(table, values: nil)
# @param [Array] row record columns
def initialize(table, values: nil, row: nil)
  @table = table
  # Fall back to an empty Hash/Array when the caller supplies nothing.
  @values = values ? values : {}
  @row = row ? row : []
end
attr_reader :table, :values

attr_reader :table, :values, :row
attr_writer :table

# @return [Time] the inclusive lower time bound of all records
Expand Down Expand Up @@ -93,6 +97,7 @@ def initialize(index: nil, label: nil, data_type: nil, group: nil, default_value
@group = group
@default_value = default_value
end

attr_reader :index, :label, :data_type, :group, :default_value
attr_writer :index, :label, :data_type, :group, :default_value
end
Expand Down
20 changes: 20 additions & 0 deletions test/influxdb/flux_csv_parser_test.rb
Original file line number Diff line number Diff line change
Expand Up @@ -506,4 +506,24 @@ def test_parse_without_datatype
assert_equal '11', tables[0].records[0].values['value1']
assert_equal 'west', tables[0].records[0].values['region']
end

# The header row names the column "result" twice (positions 1 and 7), so the
# Hash-based FluxRecord.values collapses one key (7 entries) while the
# Array-based FluxRecord.row keeps all 8 cells, including the trailing 25.3.
def test_parse_duplicate_column_names
data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,string,string,double
#group,false,false,true,true,false,true,true,false
#default,_result,,,,,,,
,result,table,_start,_stop,_time,_measurement,location,result
,,0,2022-09-13T06:14:40.469404272Z,2022-09-13T06:24:40.469404272Z,2022-09-13T06:24:33.746Z,my_measurement,Prague,25.3
,,0,2022-09-13T06:14:40.469404272Z,2022-09-13T06:24:40.469404272Z,2022-09-13T06:24:39.299Z,my_measurement,Prague,25.3
,,0,2022-09-13T06:14:40.469404272Z,2022-09-13T06:24:40.469404272Z,2022-09-13T06:24:40.454Z,my_measurement,Prague,25.3'

# ONLY_NAMES mode builds column metadata straight from the header row.
tables = InfluxDB2::FluxCsvParser.new(data, stream: false, response_mode: InfluxDB2::FluxResponseMode::ONLY_NAMES)
.parse
.tables
assert_equal 1, tables.size
assert_equal 8, tables[0].columns.size
assert_equal 3, tables[0].records.size
# Hash drops the duplicated "result" key; Array preserves every cell.
assert_equal 7, tables[0].records[0].values.size
assert_equal 8, tables[0].records[0].row.size
assert_equal 25.3, tables[0].records[0].row[7]
end
end