
Commit

Merge branch 'confirm-following-staking-banner' of github.com:dfinity/nns-dapp into confirm-following-staking-banner
mstrasinskis committed Nov 29, 2024
2 parents 41c54e8 + 9ba7cca commit e5ff19c
Showing 10 changed files with 362 additions and 67 deletions.
2 changes: 1 addition & 1 deletion dfx.json
@@ -432,7 +432,7 @@
"DIDC_VERSION": "didc 0.4.0",
"POCKETIC_VERSION": "3.0.1",
"CARGO_SORT_VERSION": "1.0.9",
"SNSDEMO_RELEASE": "release-2024-11-20",
"SNSDEMO_RELEASE": "release-2024-11-27",
"IC_COMMIT_FOR_PROPOSALS": "release-2024-11-21_03-11-24.04-base-kernel",
"IC_COMMIT_FOR_SNS_AGGREGATOR": "release-2024-11-21_03-11-24.04-base-kernel"
},
20 changes: 12 additions & 8 deletions frontend/src/lib/components/header/ExportNeuronsButton.svelte
@@ -93,15 +93,19 @@
const metadataDate = nanoSecondsToDateTime(nowInBigIntNanoSeconds());
await generateCsvFileToSave({
data: humanFriendlyContent,
metadata: [
datasets: [
{
label: $i18n.export_csv_neurons.account_id_label,
value: nnsAccountPrincipal.toString(),
},
{
label: $i18n.export_csv_neurons.date_label,
value: metadataDate,
data: humanFriendlyContent,
metadata: [
{
label: $i18n.export_csv_neurons.account_id_label,
value: nnsAccountPrincipal.toString(),
},
{
label: $i18n.export_csv_neurons.date_label,
value: metadataDate,
},
],
},
],
headers: [
61 changes: 61 additions & 0 deletions frontend/src/lib/services/export-data.services.ts
@@ -0,0 +1,61 @@
import { getTransactions } from "$lib/api/icp-index.api";
import { type SignIdentity } from "@dfinity/agent";
import type { TransactionWithId } from "@dfinity/ledger-icp";
import { isNullish } from "@dfinity/utils";

export const getAllTransactionsFromAccountAndIdentity = async ({
accountId,
identity,
lastTransactionId = undefined,
allTransactions = [],
currentPageIndex = 1,
}: {
accountId: string;
identity: SignIdentity;
lastTransactionId?: bigint;
allTransactions?: TransactionWithId[];
currentPageIndex?: number;
}): Promise<TransactionWithId[] | undefined> => {
const pageSize = 100n;
const maxNumberOfPages = 10;

try {
// TODO: Decide what to do if we reach the maximum number of iterations.
if (currentPageIndex > maxNumberOfPages) {
console.warn(
`Reached maximum limit of iterations(${maxNumberOfPages}). Stopping.`
);
return allTransactions;
}

const { transactions, oldestTxId } = await getTransactions({
accountIdentifier: accountId,
identity,
maxResults: pageSize,
start: lastTransactionId,
});

const updatedTransactions = [...allTransactions, ...transactions];

// We consider it complete if we find the oldestTxId in the list of transactions or if oldestTxId is null.
// The latter condition is necessary if the list of transactions is empty, which would otherwise return false.
const completed =
isNullish(oldestTxId) || transactions.some(({ id }) => id === oldestTxId);

if (!completed) {
const lastTx = transactions[transactions.length - 1];
return getAllTransactionsFromAccountAndIdentity({
accountId,
identity,
lastTransactionId: lastTx.id,
allTransactions: updatedTransactions,
currentPageIndex: currentPageIndex + 1,
});
}

return updatedTransactions;
} catch (error) {
console.error("Error loading ICP account transactions:", error);
return allTransactions;
}
};
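For orientation, a minimal sketch of how the new helper could be called when assembling a transactions export; the wrapper function, its name, and the logging are illustrative only, and the import path is assumed from the file's location:

// Hypothetical caller of the new export helper (not part of this commit).
import { getAllTransactionsFromAccountAndIdentity } from "$lib/services/export-data.services";
import type { SignIdentity } from "@dfinity/agent";

const exportAccountTransactions = async ({
  accountId,
  identity,
}: {
  accountId: string;
  identity: SignIdentity;
}) => {
  // The helper pages through the ICP index canister 100 transactions at a time
  // and stops after 10 pages or once the oldest transaction has been reached.
  const transactions =
    (await getAllTransactionsFromAccountAndIdentity({ accountId, identity })) ??
    [];
  console.log(`Loaded ${transactions.length} transactions for ${accountId}`);
  return transactions;
};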
8 changes: 6 additions & 2 deletions frontend/src/lib/services/icp-transactions.services.ts
@@ -4,7 +4,7 @@ import { icpTransactionsStore } from "$lib/stores/icp-transactions.store";
import { toastsError } from "$lib/stores/toasts.store";
import { toToastError } from "$lib/utils/error.utils";
import { sortTransactionsByIdDescendingOrder } from "$lib/utils/icp-transactions.utils";
import { nonNullish } from "@dfinity/utils";
import { isNullish, nonNullish } from "@dfinity/utils";
import { get } from "svelte/store";
import { getCurrentIdentity } from "./auth.services";

@@ -27,7 +27,11 @@ export const loadIcpAccountTransactions = async ({
start,
});

const completed = transactions.some(({ id }) => id === oldestTxId);
// We consider it complete if we find the oldestTxId in the list of transactions or if oldestTxId is null.
// The latter condition is necessary if the list of transactions is empty, which would otherwise return false.
const completed =
isNullish(oldestTxId) || transactions.some(({ id }) => id === oldestTxId);

icpTransactionsStore.addTransactions({
accountIdentifier,
transactions,
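The adjusted completion check is the core of this change; as a standalone sketch (not code from this commit), the predicate it implements reduces to:

import { isNullish } from "@dfinity/utils";
import type { TransactionWithId } from "@dfinity/ledger-icp";

// True when loading can stop: the index reports no oldest transaction at all
// (e.g. an account without any transactions), or the oldest transaction id is
// already contained in the page that was just fetched.
const isLoadComplete = ({
  transactions,
  oldestTxId,
}: {
  transactions: TransactionWithId[];
  oldestTxId?: bigint;
}): boolean =>
  isNullish(oldestTxId) || transactions.some(({ id }) => id === oldestTxId);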
86 changes: 49 additions & 37 deletions frontend/src/lib/utils/export-to-csv.utils.ts
@@ -1,26 +1,20 @@
import { isNullish } from "@dfinity/utils";

export type Metadata = {
type Metadata = {
label: string;
value: string;
};

type Dataset<T> = {
data: T[];
metadata?: Metadata[];
};

export type CsvHeader<T> = {
id: keyof T;
label: string;
};

interface CsvBaseConfig<T> {
data: T[];
headers: CsvHeader<T>[];
metadata?: Metadata[];
}

interface CsvFileConfig<T> extends CsvBaseConfig<T> {
fileName?: string;
description?: string;
}

const escapeCsvValue = (value: unknown): string => {
if (isNullish(value)) return "";

@@ -49,7 +43,11 @@ export const convertToCsv = <T>({
data,
headers,
metadata = [],
}: CsvBaseConfig<T>) => {
}: {
data: T[];
headers: CsvHeader<T>[];
metadata?: Metadata[];
}) => {
if (headers.length === 0) return "";

const PAD_LEFT_WHEN_METADATA_PRESENT = 2;
@@ -162,44 +160,58 @@ const saveFileWithAnchor = ({
}
};

export const combineDatasetsToCsv = <T>({
datasets,
headers,
}: {
headers: CsvHeader<T>[];
datasets: Dataset<T>[];
}): string => {
const csvParts: string[] = [];
// A double empty line break requires 3 new lines
const doubleCsvLineBreak = "\n\n\n";

for (const dataset of datasets) {
const { data, metadata } = dataset;
const csvContent = convertToCsv<T>({ data, headers, metadata });
csvParts.push(csvContent);
}
return csvParts.join(doubleCsvLineBreak);
};

/**
* Downloads data as a Csv file using either the File System Access API or fallback method.
* Downloads data as a single CSV file combining multiple datasets, using either the File System Access API or fallback method.
*
* @param options - Configuration object for the Csv download
* @param options.data - Array of objects to be converted to Csv. Each object should have consistent keys. It uses first object to check for consistency
* @param options.headers - Array of objects defining the headers and their order in the CSV. Each object should include an `id` key that corresponds to a key in the data objects.
* @param options.meatadata - Array of objects defining the metadata to be included in the CSV. Each object should include a `label` and `value` key. When present the main table will be shifted two columns to the left.
* @param options - Configuration object for the CSV download
* @param options.datasets - Array of dataset objects to be combined into a single CSV
* @param options.datasets[].data - Array of objects to be converted to CSV. Each object should have consistent keys
* @param options.datasets[].metadata - Optional array of metadata objects. Each object should include a `label` and `value` key. When present, the corresponding data will be shifted two columns to the left
* @param options.headers - Array of objects defining the headers and their order in the CSV. Each object should include an `id` key that corresponds to a key in the data objects
* @param options.fileName - Name of the file without extension (defaults to "data")
* @param options.description - File description for save dialog (defaults to " Csv file")
*
* @example
* await generateCsvFileToSave({
* data: [
* { name: "John", age: 30 },
* { name: "Jane", age: 25 }
* ],
* headers: [
* { id: "name" },
* { id: "age" }
* ],
* });
* @param options.description - File description for save dialog (defaults to "Csv file")
*
* @throws {FileSystemAccessError|CsvGenerationError} If there is an issue accessing the file system or generating the Csv
* @returns {Promise<void>} Promise that resolves when the file has been downloaded
* @throws {FileSystemAccessError|CsvGenerationError} If there is an issue accessing the file system or generating the CSV
* @returns {Promise<void>} Promise that resolves when the combined CSV file has been downloaded
*
* @remarks
* - Uses the modern File System Access API when available, falling back to traditional download method
* - Automatically handles values containing special characters like commas and new lines
* - Combines multiple datasets into a single CSV file, maintaining their respective metadata
* - Each dataset's data and metadata will be appended sequentially in the final CSV
*/
export const generateCsvFileToSave = async <T>({
data,
datasets,
headers,
metadata,
fileName = "data",
description = "Csv file",
}: CsvFileConfig<T>): Promise<void> => {
}: {
fileName?: string;
description?: string;
headers: CsvHeader<T>[];
datasets: Dataset<T>[];
}): Promise<void> => {
try {
const csvContent = convertToCsv<T>({ data, headers, metadata });
const csvContent = combineDatasetsToCsv({ datasets, headers });

const blob = new Blob([csvContent], {
type: "text/csv;charset=utf-8;",
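Since the @example was dropped from the JSDoc, here is a hedged sketch of the new datasets-based call shape (all values are illustrative, not taken from the repository):

await generateCsvFileToSave({
  datasets: [
    {
      // Rows of the first table in the file.
      data: [
        { name: "John", age: 30 },
        { name: "Jane", age: 25 },
      ],
      // Optional metadata rendered with this dataset; when present, the data
      // table is offset by PAD_LEFT_WHEN_METADATA_PRESENT (2) columns.
      metadata: [{ label: "Exported on", value: "Jan 1, 1970" }],
    },
    // The second dataset is appended after two empty lines.
    {
      data: [{ name: "Alice", age: 42 }],
    },
  ],
  headers: [
    { id: "name", label: "Name" },
    { id: "age", label: "Age" },
  ],
  fileName: "accounts",
});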
@@ -90,20 +90,24 @@ describe("ExportNeuronsButton", () => {
await po.click();
expect(generateCsvFileToSave).toBeCalledWith(
expect.objectContaining({
data: expect.arrayContaining([
datasets: expect.arrayContaining([
expect.objectContaining({
neuronId: "1",
project: "Internet Computer",
symbol: "ICP",
neuronAccountId:
"d0654c53339c85e0e5fff46a2d800101bc3d896caef34e1a0597426792ff9f32",
controllerId: "1",
creationDate: "Jan 1, 1970",
dissolveDate: "N/A",
dissolveDelaySeconds: "3 hours, 5 minutes",
stakedMaturity: "0",
stake: "30.00",
state: "Locked",
data: expect.arrayContaining([
expect.objectContaining({
neuronId: "1",
project: "Internet Computer",
symbol: "ICP",
neuronAccountId:
"d0654c53339c85e0e5fff46a2d800101bc3d896caef34e1a0597426792ff9f32",
controllerId: "1",
creationDate: "Jan 1, 1970",
dissolveDate: "N/A",
dissolveDelaySeconds: "3 hours, 5 minutes",
stakedMaturity: "0",
stake: "30.00",
state: "Locked",
}),
]),
}),
]),
})