Commit b306cd2

Implement fetching of most positive/negative/replied/retweeted tweets per slug
IvanIvanoff committed Nov 27, 2024
1 parent 1934339 commit b306cd2
Showing 4 changed files with 126 additions and 0 deletions.
79 changes: 79 additions & 0 deletions lib/sanbase/social_data/tweet.ex
@@ -0,0 +1,79 @@
defmodule Sanbase.SocialData.Tweet do
  require Mockery.Macro

  @tweet_types [:most_positive, :most_negative, :most_retweets, :most_replies]
  @tweet_type_mapping %{
    most_positive: :sentiment_pos,
    most_negative: :sentiment_neg,
    most_retweets: :retweet,
    most_replies: :reply
  }
  @recv_timeout 25_000

  def get_most_tweets(%{} = selector, type, from, to, interval) do
    slugs = (Map.get(selector, :slug) || Map.get(selector, :slugs)) |> List.wrap()

    tweets_request(slugs, type, from, to, interval)
    |> handle_tweets_response()
  end

  defp handle_tweets_response({:ok, %HTTPoison.Response{status_code: 200, body: json_body}}) do
    case Jason.decode(json_body) do
      {:ok, %{"data" => data}} -> {:ok, decode_tweets_data(data)}
      _ -> {:error, "Malformed response fetching tweets"}
    end
  end

  defp handle_tweets_response({:ok, %HTTPoison.Response{status_code: status}}) do
    {:error, "Error status #{status} fetching tweets"}
  end

  defp decode_tweets_data(data_map) when is_map(data_map) do
    data_map
    |> Enum.map(fn {slug, json_list} ->
      list = Jason.decode!(json_list)

      tweets =
        Enum.map(list, fn map ->
          %{
            text: Map.fetch!(map, "text"),
            screen_name: Map.fetch!(map, "screen_name"),
            datetime:
              Map.fetch!(map, "timestamp")
              |> NaiveDateTime.from_iso8601!()
              |> DateTime.from_naive!("Etc/UTC"),
            # TODO: Replace with fetch! after metricshub is updated
            replies_count: Map.get(map, "reply"),
            sentiment_pos: Map.get(map, "sentiment_pos"),
            sentiment_neg: Map.get(map, "sentiment_neg"),
            retweets_count: Map.get(map, "retweet")
          }
        end)

      %{slug: slug, tweets: tweets}
    end)
  end

  defp tweets_request(slugs, type, from, to, interval)
       when type in @tweet_types and is_list(slugs) do
    url = Path.join([metrics_hub_url(), "fetch_documents"])

    options = [
      recv_timeout: @recv_timeout,
      params: [
        {"slugs", slugs |> List.wrap() |> Enum.join(",")},
        {"from_timestamp", from |> DateTime.truncate(:second) |> DateTime.to_iso8601()},
        {"to_timestamp", to |> DateTime.truncate(:second) |> DateTime.to_iso8601()},
        {"interval", interval},
        {"source", "twitter"},
        {"most_type", Map.fetch!(@tweet_type_mapping, type)}
      ]
    ]

    http_client().get(url, [], options)
  end

  defp http_client, do: Mockery.Macro.mockable(HTTPoison)

  defp metrics_hub_url() do
    Sanbase.Utils.Config.module_get(Sanbase.SocialData, :metricshub_url)
  end
end
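
A minimal usage sketch of the new module (not part of the commit), assuming a configured metricshub_url and a hypothetical "bitcoin" slug; the return shape follows decode_tweets_data/1 above, with illustrative values:

iex> from = ~U[2024-11-20 00:00:00Z]
iex> to = ~U[2024-11-27 00:00:00Z]
iex> Sanbase.SocialData.Tweet.get_most_tweets(%{slug: "bitcoin"}, :most_retweets, from, to, "1d")
{:ok,
 [
   %{
     slug: "bitcoin",
     tweets: [
       %{
         text: "Example tweet text",
         screen_name: "example_account",
         datetime: ~U[2024-11-26 14:00:00Z],
         replies_count: 12,
         retweets_count: 340,
         sentiment_pos: 0.8,
         sentiment_neg: 0.1
       }
     ]
   }
 ]}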
8 changes: 8 additions & 0 deletions lib/sanbase_web/graphql/resolvers/social_data_resolver.ex
@@ -9,6 +9,14 @@ defmodule SanbaseWeb.Graphql.Resolvers.SocialDataResolver do

  @context_words_default_size 10

  def get_most_tweets(
        _root,
        %{selector: selector, from: from, to: to, interval: interval, tweet_type: type},
        _resolution
      ) do
    SocialData.Tweet.get_most_tweets(selector, type, from, to, interval)
  end

  def get_metric_spike_explanations(
        _root,
        %{metric: metric, slug: slug, from: from, to: to},
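
For reference, Absinthe hands the resolver the enum's internal value, so tweet_type arrives as one of the atoms in @tweet_types and the selector as a map with :slug or :slugs. A direct call sketch with hypothetical values:

SocialDataResolver.get_most_tweets(
  nil,
  %{
    selector: %{slug: "bitcoin"},
    from: ~U[2024-11-20 00:00:00Z],
    to: ~U[2024-11-27 00:00:00Z],
    interval: "1d",
    tweet_type: :most_retweets
  },
  nil
)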
12 changes: 12 additions & 0 deletions lib/sanbase_web/graphql/schema/queries/social_data_queries.ex
@@ -8,6 +8,18 @@ defmodule SanbaseWeb.Graphql.Schema.SocialDataQueries do
  alias SanbaseWeb.Graphql.Resolvers.SocialDataResolver

  object :social_data_queries do
    field :get_most_tweets, list_of(:slug_tweets_object) do
      meta(access: :restricted)

      arg(:selector, non_null(:selector_slug_or_slug_input_object))
      arg(:from, non_null(:datetime))
      arg(:to, non_null(:datetime))
      arg(:interval, non_null(:interval))
      arg(:tweet_type, non_null(:most_tweet_type))

      cache_resolve(&SocialDataResolver.get_most_tweets/3)
    end

    field :get_metric_spike_explanations, list_of(:metric_spike_explanation) do
      meta(access: :free)

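
A sketch of a GraphQL document that would exercise the new field, assuming Absinthe's default camelCase/SCREAMING_CASE external names for the snake_case fields and enum values defined here; the slug and time range are hypothetical:

query = """
{
  getMostTweets(
    selector: {slug: "bitcoin"}
    from: "2024-11-20T00:00:00Z"
    to: "2024-11-27T00:00:00Z"
    interval: "1d"
    tweetType: MOST_RETWEETS
  ) {
    slug
    tweets {
      datetime
      text
      screenName
      repliesCount
      retweetsCount
      sentimentPos
      sentimentNeg
    }
  }
}
"""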
27 changes: 27 additions & 0 deletions lib/sanbase_web/graphql/schema/types/social_data_types.ex
@@ -4,6 +4,13 @@ defmodule SanbaseWeb.Graphql.SocialDataTypes do
  alias SanbaseWeb.Graphql.Resolvers.SocialDataResolver
  import SanbaseWeb.Graphql.Cache, only: [cache_resolve: 1]

  enum :most_tweet_type do
    value(:most_positive)
    value(:most_negative)
    value(:most_retweets)
    value(:most_replies)
  end

  enum :trending_word_type_filter do
    value(:project)
    value(:non_project)
@@ -30,6 +37,26 @@ defmodule SanbaseWeb.Graphql.SocialDataTypes do
    value(:telegram_discussion_overview)
  end

  input_object :selector_slug_or_slug_input_object do
    field(:slug, :string)
    field(:slugs, list_of(:string))
  end

  object :slug_tweets_object do
    field(:slug, non_null(:string))
    field(:tweets, list_of(:tweet))
  end

  object :tweet do
    field(:datetime, non_null(:datetime))
    field(:text, non_null(:string))
    field(:screen_name, non_null(:string))
    field(:sentiment_pos, :float)
    field(:sentiment_neg, :float)
    field(:replies_count, :integer)
    field(:retweets_count, :integer)
  end

  object :metric_spike_explanation do
    field(:spike_start_datetime, non_null(:datetime))
    field(:spike_end_datetime, non_null(:datetime))
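
A hedged test sketch (not part of the commit) tying the pieces together with Mockery, assuming the test environment routes Mockery.Macro.mockable(HTTPoison) through Mockery's proxy and that metricshub returns each slug's tweets as a JSON-encoded string, which is what decode_tweets_data/1 expects:

defmodule Sanbase.SocialData.TweetTest do
  # Hypothetical test module; file path and setup are assumptions.
  use ExUnit.Case, async: true
  import Mockery

  test "fetches and decodes the most retweeted tweets per slug" do
    # Each slug maps to a JSON-encoded list of tweets, mirroring decode_tweets_data/1.
    tweets_json =
      Jason.encode!([
        %{
          "text" => "Example tweet text",
          "screen_name" => "example_account",
          "timestamp" => "2024-11-26T14:00:00",
          "reply" => 12,
          "retweet" => 340,
          "sentiment_pos" => 0.8,
          "sentiment_neg" => 0.1
        }
      ])

    body = Jason.encode!(%{"data" => %{"bitcoin" => tweets_json}})

    # Stub the HTTP layer that tweets_request/5 goes through.
    mock(HTTPoison, [get: 3], {:ok, %HTTPoison.Response{status_code: 200, body: body}})

    assert {:ok, [%{slug: "bitcoin", tweets: [tweet]}]} =
             Sanbase.SocialData.Tweet.get_most_tweets(
               %{slug: "bitcoin"},
               :most_retweets,
               ~U[2024-11-20 00:00:00Z],
               ~U[2024-11-27 00:00:00Z],
               "1d"
             )

    assert tweet.retweets_count == 340
    assert tweet.datetime == ~U[2024-11-26 14:00:00Z]
  end
end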
