-        {token && token.logo ? (
-          <img src={token.logo} />
+        {!!token && !!token.logo ? (
+          <img
+            src={token.logo}
+            onError={ev => {
+              ev.target.onerror = null;
+              ev.target.src = defaultTokenImage;
+            }}
+          />
) : (
-
+
)}
@@ -53,6 +61,7 @@ TokenCard.defaultProps = {
TokenCard.propTypes = {
decimals: PropTypes.number.isRequired,
+ defaultTokenImage: PropTypes.string,
token: PropTypes.shape({
logo: PropTypes.string,
name: PropTypes.string,
diff --git a/packages/fether-ui/src/assets/img/tokens/token-placeholder-128x128.jpg b/packages/fether-ui/src/assets/img/tokens/token-placeholder-128x128.jpg
new file mode 100644
index 000000000..f6f9a5fbe
Binary files /dev/null and b/packages/fether-ui/src/assets/img/tokens/token-placeholder-128x128.jpg differ
diff --git a/scripts/updateTokens/README.md b/scripts/updateTokens/README.md
new file mode 100644
index 000000000..dba63f84d
--- /dev/null
+++ b/scripts/updateTokens/README.md
@@ -0,0 +1,9 @@
+# Update Tokens Script
+
+This folder contains a script which fetches all the tokens from https://github.com/ethereum-lists/tokens and outputs them as one JSON file per network.
+
+This entire folder has been taken from https://github.com/MyCryptoHQ/MyCrypto/tree/574c628e6189de5b7848c4c10258bc22f42b337c/scripts. Thank you MyCrypto for this.
+
+Please also see [this PR](https://github.com/MyCryptoHQ/MyCrypto/pull/1247) for a description of this script.
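+
+Assuming the `ts-node` dependency added in this PR is installed at the repo root, the script can be run with something like `yarn ts-node scripts/updateTokens/index.ts`; pass `--use-hardcoded-ipfs-addresses` to skip the GitHub/IPFS lookup and use the addresses hardcoded in `update-tokens.ts`.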
diff --git a/scripts/updateTokens/index.ts b/scripts/updateTokens/index.ts
new file mode 100644
index 000000000..3633843b1
--- /dev/null
+++ b/scripts/updateTokens/index.ts
@@ -0,0 +1 @@
+import './update-tokens';
diff --git a/scripts/updateTokens/tsconfig.json b/scripts/updateTokens/tsconfig.json
new file mode 100644
index 000000000..7d4c94efa
--- /dev/null
+++ b/scripts/updateTokens/tsconfig.json
@@ -0,0 +1,7 @@
+{
+ "compilerOptions": {
+ "lib": [
+ "es2015"
+ ]
+ }
+}
\ No newline at end of file
diff --git a/scripts/updateTokens/types/CommitStatus.ts b/scripts/updateTokens/types/CommitStatus.ts
new file mode 100644
index 000000000..8ec09992b
--- /dev/null
+++ b/scripts/updateTokens/types/CommitStatus.ts
@@ -0,0 +1,31 @@
+export interface Creator {
+ login: string;
+ id: number;
+ avatar_url: string;
+ gravatar_id: string;
+ url: string;
+ html_url: string;
+ followers_url: string;
+ following_url: string;
+ gists_url: string;
+ starred_url: string;
+ subscriptions_url: string;
+ organizations_url: string;
+ repos_url: string;
+ events_url: string;
+ received_events_url: string;
+ type: string;
+ site_admin: boolean;
+}
+
+export interface CommitStatus {
+ url: string;
+ id: number;
+ state: string;
+ description: string;
+ target_url: string;
+ context: string;
+ created_at: Date;
+ updated_at: Date;
+ creator: Creator;
+}
diff --git a/scripts/updateTokens/types/GitCommit.ts b/scripts/updateTokens/types/GitCommit.ts
new file mode 100644
index 000000000..5b876e3e9
--- /dev/null
+++ b/scripts/updateTokens/types/GitCommit.ts
@@ -0,0 +1,90 @@
+export interface Author {
+ name: string;
+ email: string;
+ date: Date;
+}
+
+export interface Committer {
+ name: string;
+ email: string;
+ date: Date;
+}
+
+export interface Tree {
+ sha: string;
+ url: string;
+}
+
+export interface Verification {
+ verified: boolean;
+ reason: string;
+ signature?: any;
+ payload?: any;
+}
+
+export interface Commit {
+ author: Author;
+ committer: Committer;
+ message: string;
+ tree: Tree;
+ url: string;
+ comment_count: number;
+ verification: Verification;
+}
+
+export interface Author2 {
+ login: string;
+ id: number;
+ avatar_url: string;
+ gravatar_id: string;
+ url: string;
+ html_url: string;
+ followers_url: string;
+ following_url: string;
+ gists_url: string;
+ starred_url: string;
+ subscriptions_url: string;
+ organizations_url: string;
+ repos_url: string;
+ events_url: string;
+ received_events_url: string;
+ type: string;
+ site_admin: boolean;
+}
+
+export interface Committer2 {
+ login: string;
+ id: number;
+ avatar_url: string;
+ gravatar_id: string;
+ url: string;
+ html_url: string;
+ followers_url: string;
+ following_url: string;
+ gists_url: string;
+ starred_url: string;
+ subscriptions_url: string;
+ organizations_url: string;
+ repos_url: string;
+ events_url: string;
+ received_events_url: string;
+ type: string;
+ site_admin: boolean;
+}
+
+export interface Parent {
+ sha: string;
+ url: string;
+ html_url: string;
+}
+
+export interface GitCommit {
+ sha: string;
+ commit: Commit;
+ url: string;
+ html_url: string;
+ comments_url: string;
+ author: Author2;
+ committer: Committer2;
+ parents: Parent[];
+}
diff --git a/scripts/updateTokens/types/Token.ts b/scripts/updateTokens/types/Token.ts
new file mode 100644
index 000000000..178f3cb53
--- /dev/null
+++ b/scripts/updateTokens/types/Token.ts
@@ -0,0 +1,6 @@
+export interface Token {
+ address: string;
+ symbol: string;
+ decimals: number;
+ error?: string | null;
+}
diff --git a/scripts/updateTokens/types/TokensJson.ts b/scripts/updateTokens/types/TokensJson.ts
new file mode 100644
index 000000000..5fb7d8c7f
--- /dev/null
+++ b/scripts/updateTokens/types/TokensJson.ts
@@ -0,0 +1,26 @@
+export interface RawTokenJSON {
+ name?: string;
+ symbol?: string;
+ address?: string;
+ decimals?: number | string;
+}
+
+export interface ValidatedTokenJSON {
+ name: string;
+ symbol: string;
+ address: string;
+ decimals: number | string;
+  logo?: TokenLogo;
+}
+
+export interface TokenLogo {
+ src: string;
+}
+
+export interface NormalizedTokenJSON {
+ name: string;
+ symbol: string;
+ address: string;
+ decimals: number;
+ logo?: string;
+}
diff --git a/scripts/updateTokens/update-tokens-utils.ts b/scripts/updateTokens/update-tokens-utils.ts
new file mode 100644
index 000000000..44441ce9e
--- /dev/null
+++ b/scripts/updateTokens/update-tokens-utils.ts
@@ -0,0 +1,167 @@
+import {
+ RawTokenJSON,
+ ValidatedTokenJSON,
+ NormalizedTokenJSON
+} from "./types/TokensJson";
+import { Token } from "./types/Token";
+interface StrIdx<T> {
+  [key: string]: T;
+}
+
+const networks = [
+ {
+ networkName: "foundation",
+ networkNameIPFS: "eth"
+ },
+ {
+ networkName: "goerli",
+ networkNameIPFS: "gor"
+ },
+ {
+ networkName: "kovan",
+ networkNameIPFS: "kov"
+ },
+ {
+ networkName: "ropsten",
+ networkNameIPFS: "rop"
+ }
+];
+
+function processTokenJson(tokensJson: RawTokenJSON[]): Token[] {
+ const normalizedTokens = tokensJson
+ .map(validateTokenJSON)
+ .map(normalizeTokenJSON);
+ checkForDuplicateAddresses(normalizedTokens);
+ return handleDuplicateSymbols(normalizedTokens);
+}
+
+function validateTokenJSON(token: RawTokenJSON): ValidatedTokenJSON {
+ const isValid = (t: RawTokenJSON): t is ValidatedTokenJSON =>
+ !!(t.address && (t.decimals || t.decimals === 0) && t.name && t.symbol);
+
+ if (isValid(token)) {
+ return token;
+ }
+ throw Error(`Token failed validation, missing part of schema
+ Symbol: ${token.symbol}
+ Name: ${token.name}
+ Address: ${token.address}
+ Decimals: ${token.decimals}`);
+}
+
+function normalizeTokenJSON(token: ValidatedTokenJSON): NormalizedTokenJSON {
+ const { address, decimals, symbol, name, logo } = token;
+ return {
+ address,
+ symbol,
+ decimals: +decimals,
+ name,
+ ...(!!logo && logo.src ? { logo: logo.src } : {})
+ };
+}
+
+/**
+ *
+ * @description Checks for any duplicated addresses and halts the program if so
+ * @param {NormalizedTokenJSON[]} tokens
+ */
+function checkForDuplicateAddresses(tokens: NormalizedTokenJSON[]) {
+  const map: StrIdx<boolean> = {};
+ const errors: string[] = [];
+ for (const token of tokens) {
+ const { address } = token;
+ // We might want to strip hex prefixes here, and make all characters lowercase
+ if (map[address]) {
+ errors.push(
+ `Token ${token.symbol} has a duplicate address of ${token.address}`
+ );
+ }
+ map[address] = true;
+ }
+
+ if (errors.length) {
+ const err = errors.join("\n");
+ throw Error(err);
+ }
+}
+
+/**
+ *
+ * @description Finds any duplicated names in the fetched token json
+ * @param {NormalizedTokenJSON[]} tokens
+ * @returns
+ */
+function getDuplicatedNames(tokens: NormalizedTokenJSON[]) {
+  const checkedNames: StrIdx<boolean> = {};
+  const duplicatedNames: StrIdx<boolean> = {};
+ for (const token of tokens) {
+ const { name } = token;
+ if (checkedNames[name]) {
+ duplicatedNames[name] = true;
+ }
+ checkedNames[name] = true;
+ }
+ return duplicatedNames;
+}
+
+/**
+ *
+ * @description Handles any tokens with duplicated symbols by placing them in a map with each value being a bucket
+ * of other tokens with the same symbol, then renaming them appropriately so they do not conflict anymore
+ * @param {NormalizedTokenJSON[]} tokens
+ * @returns
+ */
+function handleDuplicateSymbols(tokens: NormalizedTokenJSON[]) {
+ // start by building a map of symbols => tokens
+  const map = new Map<string, NormalizedTokenJSON[]>();
+ for (const token of tokens) {
+ const { symbol } = token;
+ const v = map.get(symbol);
+ if (v) {
+ map.set(symbol, [...v, token]);
+ } else {
+ map.set(symbol, [token]);
+ }
+ }
+ const duplicatedNames = getDuplicatedNames(tokens);
+ const dedupedTokens: NormalizedTokenJSON[] = [];
+ map.forEach(tokenBucket =>
+ dedupedTokens.push(...renameSymbolCollisions(tokenBucket, duplicatedNames))
+ );
+ return dedupedTokens;
+}
+
+/**
+ *
+ * @description Any token symbol collisions are handled in this manner:
+ * 1) If the token name isn't also a duplicate, the symbol is suffixed with the token name in parentheses
+ * 2) If it is a duplicate, we suffix the token index + 1 instead (so we don't start at 0)
+ * @param {NormalizedTokenJSON[]} tokens
+ * @param {StrIdx<boolean>} duplicatedNames
+ * @returns
+ */
+function renameSymbolCollisions(
+ tokens: NormalizedTokenJSON[],
+  duplicatedNames: StrIdx<boolean>
+) {
+ const renamedTokens: NormalizedTokenJSON[] = [];
+ if (tokens.length === 1) {
+ return tokens;
+ }
+
+ return tokens.reduce((prev, curr, idx) => {
+ const newName = `${curr.symbol} (${
+ duplicatedNames[curr.name] ? idx + 1 : curr.name
+ })`;
+ const tokenToInsert: NormalizedTokenJSON = {
+ ...curr,
+ symbol: newName
+ };
+ console.warn(
+ `WARN: "${curr.symbol}" has a duplicate symbol, renaming to "${newName}"`
+ );
+ return [...prev, tokenToInsert];
+ }, renamedTokens);
+}
+
+export { networks, processTokenJson };
diff --git a/scripts/updateTokens/update-tokens.ts b/scripts/updateTokens/update-tokens.ts
new file mode 100644
index 000000000..345b31e24
--- /dev/null
+++ b/scripts/updateTokens/update-tokens.ts
@@ -0,0 +1,139 @@
+import { CommitStatus } from "./types/CommitStatus";
+import { GitCommit } from "./types/GitCommit";
+import { RawTokenJSON } from "./types/TokensJson";
+
+import * as fs from "fs";
+import * as https from "https";
+import { networks, processTokenJson } from "./update-tokens-utils";
+import * as path from "path";
+
+const hardcoded_ipfs_files = {
+ eth:
+ "https://cloudflare-ipfs.com/ipfs/QmUJJpSQXWiKh6Jex6wLSZ1RWND8CxJu6XQMb7v2ByQhTR",
+ kov:
+ "https://cloudflare-ipfs.com/ipfs/QmZUXkAH69BpjJWcpND5HnQVsro6CXVxKiSX9vK49KsyZn",
+ rop:
+ "https://cloudflare-ipfs.com/ipfs/QmRAzyMEFNFFRqKTMcpk5qDdTpctgTDQU2PN8RPXSt5guj"
+};
+
+function httpsGet(opts: any): Promise<string> {
+ return new Promise(resolve => {
+ https.get(opts, (res: any) => {
+ let body = "";
+ res.setEncoding("utf8");
+ res.on("data", (data: any) => (body += data));
+ res.on("end", () => {
+ resolve(body);
+ });
+ });
+ });
+}
+
+function githubApi<T>(pathTail: string): Promise<T> {
+ return httpsGet({
+ hostname: "api.github.com",
+ path: `/repos/ethereum-lists/tokens${pathTail}`,
+ headers: {
+ "user-agent": "node",
+ "content-type": "application/json; charset=utf-8"
+ }
+ }).then(body => JSON.parse(body));
+}
+
+async function getIPFSaddresses() {
+ let useHardcodedAddresses = false;
+
+ // parse the command line params passed to the script
+ // if "--use-hardcoded-ipfs-addresses" was passed
+ // we use the hardcoded ipfs files defined at the top
+ // of this script
+ process.argv.forEach(function(val, index, array) {
+ if (val === "--use-hardcoded-ipfs-addresses") {
+ useHardcodedAddresses = true;
+ }
+ });
+
+ if (useHardcodedAddresses) {
+ console.log(`Using hardcoded IPFS addresses...`);
+ return hardcoded_ipfs_files;
+ } else {
+ // First we fetch the latest commit from ethereum-lists/tokens
+ console.log("Fetching ethereum-lists/tokens commits...");
+    const commits = await githubApi<GitCommit[]>("/commits");
+ const commit = commits[0];
+
+ // Then we fetch its build status
+ console.log("Fetching commits statuses...");
+    const statuses = await githubApi<CommitStatus[]>(`/statuses/${commit.sha}`);
+
+ // Fetch the IPFS link, which is a page of links to other IPFS links
+ console.log("Fetching IPFS output HTML...");
+ const ipfsUrl = statuses.find(status => status.target_url.includes("ipfs"));
+ if (!ipfsUrl) {
+ throw Error("ipfs url not found");
+ }
+ const ipfsTargetUrl = ipfsUrl.target_url;
+ const ipfsHtml = await httpsGet(ipfsTargetUrl);
+
+    // Get the IPFS url for each network's tokens JSON. Regexing HTML hurts, but w/e
+ const ipfs_files = {};
+ networks.forEach(async network => {
+ console.log(`Fetching IPFS ${network.networkName} Tokens JSON...`);
+    const regex = `<a href="(.+)">output\/full\/${
+      network.networkNameIPFS
+    }\.json<\/a>`;
+ const tokenUrlMatch = ipfsHtml.match(regex);
+ if (!tokenUrlMatch) {
+ throw Error("No match found for token url");
+ }
+ ipfs_files[network.networkNameIPFS] = tokenUrlMatch[1];
+ });
+ return ipfs_files;
+ }
+}
+
+async function run() {
+  // get the list of files to use per network
+ const ipfsAddresses = await getIPFSaddresses();
+ console.log("Using the following addresses: ", ipfsAddresses);
+
+ networks.forEach(async network => {
+ if (!!ipfsAddresses[network.networkNameIPFS]) {
+ const tokensUrl = ipfsAddresses[network.networkNameIPFS];
+ const tokensJson: RawTokenJSON[] = JSON.parse(await httpsGet(tokensUrl));
+
+      // Format the json to match our format in /packages/fether-react/src/assets/tokens/<network>.json
+ const tokens = processTokenJson(tokensJson);
+ // Write to the file
+ console.log(
+ `Writing Tokens JSON to /packages/fether-react/src/assets/tokens/${
+ network.networkName
+ }.json...`
+ );
+ const filePath = path.resolve(
+ __dirname,
+ `../../packages/fether-react/src/assets/tokens/${
+ network.networkName
+ }.json`
+ );
+ fs.writeFile(
+ filePath,
+ JSON.stringify(tokens, null, 2),
+ "utf8",
+ (err: any) => {
+ if (err) {
+ console.error(err);
+ throw new Error(
+ "Failed to write tokens json to file, see above error"
+ );
+ }
+ console.log("Succesfully imported", tokens.length, "tokens!");
+ }
+ );
+ } else {
+ console.log(`No IPFS file found for ${network.networkName}!`);
+ }
+ });
+}
+
+run();
diff --git a/yarn.lock b/yarn.lock
index 962419c69..1c6bdfc7e 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2255,6 +2255,11 @@ are-we-there-yet@~1.1.2:
delegates "^1.0.0"
readable-stream "^2.0.6"
+arg@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.0.tgz#583c518199419e0037abb74062c37f8519e575f0"
+ integrity sha512-ZWc51jO3qegGkVh8Hwpv636EkbesNV5ZNQPCtRa+0qytRYPEs9IYT9qITY9buezqUH5uqyzlWLcufrzU2rffdg==
+
argparse@^1.0.7:
version "1.0.10"
resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
@@ -4981,7 +4986,7 @@ dezalgo@^1.0.0:
asap "^2.0.0"
wrappy "1"
-diff@^3.2.0:
+diff@^3.1.0, diff@^3.2.0:
version "3.5.0"
resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12"
integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==
@@ -9439,6 +9444,11 @@ make-dir@^1.0.0, make-dir@^1.2.0:
dependencies:
pify "^3.0.0"
+make-error@^1.1.1:
+ version "1.3.5"
+ resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.5.tgz#efe4e81f6db28cadd605c70f29c831b58ef776c8"
+ integrity sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g==
+
make-fetch-happen@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/make-fetch-happen/-/make-fetch-happen-4.0.1.tgz#141497cb878f243ba93136c83d8aba12c216c083"
@@ -14652,6 +14662,17 @@ tryer@^1.0.0:
resolved "https://registry.yarnpkg.com/tryer/-/tryer-1.0.1.tgz#f2c85406800b9b0f74c9f7465b81eaad241252f8"
integrity sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA==
+ts-node@^8.0.3:
+ version "8.0.3"
+ resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-8.0.3.tgz#aa60b836a24dafd8bf21b54766841a232fdbc641"
+ integrity sha512-2qayBA4vdtVRuDo11DEFSsD/SFsBXQBRZZhbRGSIkmYmVkWjULn/GGMdG10KVqkaGndljfaTD8dKjWgcejO8YA==
+ dependencies:
+ arg "^4.1.0"
+ diff "^3.1.0"
+ make-error "^1.1.1"
+ source-map-support "^0.5.6"
+ yn "^3.0.0"
+
tslib@^1.9.0:
version "1.9.3"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.3.tgz#d7e4dd79245d85428c4d7e4822a79917954ca286"
@@ -14701,6 +14722,11 @@ typedarray@^0.0.6:
resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777"
integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=
+typescript@^3.3.4000:
+ version "3.3.4000"
+ resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.3.4000.tgz#76b0f89cfdbf97827e1112d64f283f1151d6adf0"
+ integrity sha512-jjOcCZvpkl2+z7JFn0yBOoLQyLoIkNZAs/fYJkUG6VKy6zLPHJGfQJYFHzibB6GJaF/8QrcECtlQ5cpvRHSMEA==
+
ua-parser-js@^0.7.18:
version "0.7.19"
resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.19.tgz#94151be4c0a7fb1d001af7022fdaca4642659e4b"
@@ -15881,6 +15907,11 @@ yauzl@^2.4.2:
buffer-crc32 "~0.2.3"
fd-slicer "~1.1.0"
+yn@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/yn/-/yn-3.0.0.tgz#0073c6b56e92aed652fbdfd62431f2d6b9a7a091"
+ integrity sha512-+Wo/p5VRfxUgBUGy2j/6KX2mj9AYJWOHuhMjMcbBFc3y54o9/4buK1ksBvuiK01C3kby8DH9lSmJdSxw+4G/2Q==
+
zen-observable@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/zen-observable/-/zen-observable-0.2.1.tgz#c47676a64132b8475a61aa49e514755b5b9663f3"