diff --git a/docs/_freeze/posts/walking-talking-cube/index/execute-results/html.json b/docs/_freeze/posts/walking-talking-cube/index/execute-results/html.json new file mode 100644 index 000000000000..e355e47a86b1 --- /dev/null +++ b/docs/_freeze/posts/walking-talking-cube/index/execute-results/html.json @@ -0,0 +1,16 @@ +{ + "hash": "db88c501535c5068b03c526d267552e4", + "result": { + "engine": "jupyter", + "markdown": "---\ntitle: \"Taking a random cube for a walk and making it talk\"\nauthor: \"Cody Peterson\"\ndate: \"2024-09-26\"\nimage: thumbnail.png\ncategories:\n - blog\n - duckdb\n - udfs\n---\n\n***Synthetic data with Ibis, DuckDB, Python UDFs, and Faker.***\n\nTo follow along, install the required libraries:\n\n```bash\npip install 'ibis-framework[duckdb]' faker plotly\n```\n\n## A random cube\n\nWe'll generate a random cube of data with Ibis (default DuckDB backend) and\nvisualize it as a 3D line plot:\n\n::: {#0daf46ce .cell execution_count=2}\n``` {.python .cell-code code-fold=\"true\" code-summary=\"Show me the code!\"}\nimport ibis # <1>\nimport ibis.selectors as s\nimport plotly.express as px # <1>\n\nibis.options.interactive = True # <2>\nibis.options.repr.interactive.max_rows = 5 # <2>\n\ncon = ibis.connect(\"duckdb://synthetic.ddb\") # <3>\n\nif \"source\" in con.list_tables():\n t = con.table(\"source\") # <4>\nelse:\n lookback = ibis.interval(days=1) # <5>\n step = ibis.interval(seconds=1) # <5>\n\n t = (\n (\n ibis.range( # <6>\n ibis.now() - lookback,\n ibis.now(),\n step=step,\n ) # <6>\n .unnest() # <7>\n .name(\"timestamp\") # <8>\n .as_table() # <9>\n )\n .mutate(\n index=(ibis.row_number().over(order_by=\"timestamp\")), # <10>\n **{col: 2 * (ibis.random() - 0.5) for col in [\"a\", \"b\", \"c\"]}, # <11>\n )\n .mutate(color=ibis._[\"index\"].histogram(nbins=8)) # <12>\n .drop(\"index\") # <13>\n .relocate(\"timestamp\", \"color\") # <14>\n .order_by(\"timestamp\") # <15>\n )\n\n t = con.create_table(\"source\", t.to_pyarrow()) # <16>\n\nc = px.line_3d( # <17>\n t,\n x=\"a\",\n y=\"b\",\n z=\"c\",\n color=\"color\",\n hover_data=[\"timestamp\"],\n) # <17>\nc\n```\n\n::: {.cell-output .cell-output-display}\n```{=html}\n
\n```\n:::\n:::\n\n\n1. Import the necessary libraries.\n2. Enable interactive mode for Ibis.\n3. Connect to an on-disk DuckDB database.\n4. Load the table if it already exists.\n5. Define the time range and step for the data.\n6. Create the array of timestamps.\n7. Unnest the array to a column.\n8. Name the column \"timestamp\".\n9. Convert the column into a table.\n10. Create a monotonically increasing index column.\n11. Create three columns of random numbers.\n12. Create a color column based on the index (help visualize the time series).\n13. Drop the index column.\n14. Rearrange the columns.\n15. Order the table by timestamp.\n16. Store the table in the on-disk database.\n17. Create a 3D line plot of the data.\n\n## Walking\n\nWe have a random cube of data:\n\n::: {#921b1a6e .cell execution_count=3}\n``` {.python .cell-code}\nt\n```\n\n::: {.cell-output .cell-output-display execution_count=2}\n```{=html}\n
| timestamp                | color | a         | b         | c         |
|--------------------------|-------|-----------|-----------|-----------|
| timestamp(6)             | int64 | float64   | float64   | float64   |
| 2024-07-23 23:35:06.010  | 0     | -0.837407 | -0.681716 | 0.692806  |
| 2024-07-23 23:35:07.010  | 0     | 0.307479  | -0.923701 | -0.479673 |
| 2024-07-23 23:35:08.010  | 0     | 0.136195  | 0.604583  | 0.078360  |
| 2024-07-23 23:35:09.010  | 0     | -0.261867 | 0.925287  | 0.339049  |
| 2024-07-23 23:35:10.010  | 0     | 0.813623  | 0.255287  | -0.079172 |
| …                        | …     | …         | …         | …         |
\n```\n:::\n:::\n\n\nBut we need to make it [walk](https://en.wikipedia.org/wiki/Random_walk). We'll\nuse a window function to calculate the cumulative sum of each column:\n\n::: {.panel-tabset}\n\n## Without column selectors\n\n::: {#8e162bae .cell execution_count=4}\n``` {.python .cell-code}\nwindow = ibis.window(order_by=\"timestamp\", preceding=None, following=0)\nwalked = t.select(\n \"timestamp\",\n \"color\",\n a=t[\"a\"].sum().over(window),\n b=t[\"b\"].sum().over(window),\n c=t[\"c\"].sum().over(window),\n).order_by(\"timestamp\")\nwalked\n```\n\n::: {.cell-output .cell-output-display execution_count=3}\n```{=html}\n
| timestamp                | color | a         | b         | c         |
|--------------------------|-------|-----------|-----------|-----------|
| timestamp(6)             | int64 | float64   | float64   | float64   |
| 2024-07-23 23:35:06.010  | 0     | -0.837407 | -0.681716 | 0.692806  |
| 2024-07-23 23:35:07.010  | 0     | -0.529928 | -1.605417 | 0.213133  |
| 2024-07-23 23:35:08.010  | 0     | -0.393733 | -1.000834 | 0.291492  |
| 2024-07-23 23:35:09.010  | 0     | -0.655600 | -0.075547 | 0.630542  |
| 2024-07-23 23:35:10.010  | 0     | 0.158024  | 0.179740  | 0.551369  |
| …                        | …     | …         | …         | …         |
\n```\n:::\n:::\n\n\n## With column selectors\n\n::: {#b7a45e7e .cell execution_count=5}\n``` {.python .cell-code}\nwindow = ibis.window(order_by=\"timestamp\", preceding=None, following=0)\nwalked = t.select(\n \"timestamp\",\n \"color\",\n s.across(\n s.c(\"a\", \"b\", \"c\"), # <1>\n ibis._.sum().over(window), # <2>\n ),\n).order_by(\"timestamp\")\nwalked\n```\n\n::: {.cell-output .cell-output-display execution_count=4}\n```{=html}\n
| timestamp                | color | a         | b         | c         |
|--------------------------|-------|-----------|-----------|-----------|
| timestamp(6)             | int64 | float64   | float64   | float64   |
| 2024-07-23 23:35:06.010  | 0     | -0.837407 | -0.681716 | 0.692806  |
| 2024-07-23 23:35:07.010  | 0     | -0.529928 | -1.605417 | 0.213133  |
| 2024-07-23 23:35:08.010  | 0     | -0.393733 | -1.000834 | 0.291492  |
| 2024-07-23 23:35:09.010  | 0     | -0.655600 | -0.075547 | 0.630542  |
| 2024-07-23 23:35:10.010  | 0     | 0.158024  | 0.179740  | 0.551369  |
| …                        | …     | …         | …         | …         |
\n```\n:::\n:::\n\n\n1. Alternatively, you can use `s.of_type(float)` to select all float columns.\n2. Use the `ibis._` selector to reference a deferred column expression.\n\n:::\n\nWhile the first few rows may look similar to the cube, the 3D line plot does\nnot:\n\n::: {#09c4a94f .cell execution_count=6}\n``` {.python .cell-code code-fold=\"true\" code-summary=\"Show me the code!\"}\nc = px.line_3d(\n walked,\n x=\"a\",\n y=\"b\",\n z=\"c\",\n color=\"color\",\n hover_data=[\"timestamp\"],\n)\nc\n```\n\n::: {.cell-output .cell-output-display}\n```{=html}\n
\n```\n:::\n:::\n\n\n## Talking\n\nWe've made our random cube and we've made it walk, but now we want to make it\ntalk. At this point, you might be questioning the utility of this blog post --\nwhat are we doing and why? The purpose is to demonstrate generating synthetic\ndata that can look realistic. We achieve this by building in randomness (e.g. a\nrandom walk can be used to simulate stock prices) and also by using that\nrandomness to inform the generation of non-numeric synthetic data (e.g. the\nticker symbol of a stock).\n\n### Faking it\n\nLet's demonstrate this concept by pretending we have an application where users\ncan review a location they're at. The user's name, comment, location, and device\ninfo are stored in our database for their review at a given timestamp.\n\n[Faker](https://github.com/joke2k/faker) is a commonly used Python library for\ngenerating fake data. We'll use it to generate fake names, comments, locations,\nand device info for our reviews:\n\n::: {#67d9f452 .cell execution_count=7}\n``` {.python .cell-code}\nfrom faker import Faker\n\nfake = Faker()\n\nres = (\n fake.name(),\n fake.sentence(),\n fake.location_on_land(),\n fake.user_agent(),\n fake.ipv4(),\n)\nres\n```\n\n::: {.cell-output .cell-output-display execution_count=6}\n```\n('Robyn Foster',\n 'Employee security there meeting.',\n ('41.75338', '-86.11084', 'Granger', 'US', 'America/Indiana/Indianapolis'),\n 'Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; unm-US) AppleWebKit/533.16.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B118 Safari/6533.16.7',\n '119.243.96.150')\n```\n:::\n:::\n\n\nWe can use our random numbers to influence the fake data generation in a Python\nUDF:\n\n\n\n::: {#907d2dc8 .cell execution_count=9}\n``` {.python .cell-code code-fold=\"true\" code-summary=\"Show me the code!\"}\nimport ibis.expr.datatypes as dt\n\nfrom datetime import datetime, timedelta\n\nibis.options.repr.interactive.max_length = 5\n\nrecord_schema = dt.Struct(\n {\n \"timestamp\": datetime,\n \"name\": str,\n \"comment\": str,\n \"location\": list[str],\n \"device\": dt.Struct(\n {\n \"browser\": str,\n \"ip\": str,\n }\n ),\n }\n)\n\n\n@ibis.udf.scalar.python\ndef faked_batch(\n timestamp: datetime,\n a: float,\n b: float,\n c: float,\n batch_size: int = 8,\n) -> dt.Array(record_schema):\n \"\"\"\n Generate records of fake data.\n \"\"\"\n value = (a + b + c) / 3\n\n res = [\n {\n \"timestamp\": timestamp + timedelta(seconds=0.1 * i),\n \"name\": fake.name() if value >= 0.5 else fake.first_name(),\n \"comment\": fake.sentence(),\n \"location\": fake.location_on_land(),\n \"device\": {\n \"browser\": fake.user_agent(),\n \"ip\": fake.ipv4() if value >= 0 else fake.ipv6(),\n },\n }\n for i in range(batch_size)\n ]\n\n return res\n\n\nif \"faked\" in con.list_tables():\n faked = con.table(\"faked\")\nelse:\n faked = (\n t.mutate(\n faked=faked_batch(t[\"timestamp\"], t[\"a\"], t[\"b\"], t[\"c\"]),\n )\n .select(\n \"a\",\n \"b\",\n \"c\",\n ibis._[\"faked\"].unnest(),\n )\n .unpack(\"faked\")\n .drop(\"a\", \"b\", \"c\")\n )\n\n faked = con.create_table(\"faked\", faked)\n\nfaked\n```\n\n::: {.cell-output .cell-output-display execution_count=8}\n```{=html}\n
| timestamp                | name   | comment                                 | location                                                          |
|--------------------------|--------|-----------------------------------------|-------------------------------------------------------------------|
| timestamp(6)             | string | string                                  | array<string>                                                     |
| 2024-07-23 23:35:06.010  | Glenda | Than available eye.                     | ['13.65805', '102.56365', 'Paoy Paet', 'KH', 'Asia/Phnom_Penh']   |
| 2024-07-23 23:35:06.110  | Trevor | Ability commercial admit adult he.      | ['56.9083', '60.8019', 'Beryozovsky', 'RU', 'Asia/Yekaterinburg'] |
| 2024-07-23 23:35:06.210  | Janet  | Sign fact time against energy.          | ['25.66795', '85.83636', 'Dalsingh Sarai', 'IN', 'Asia/Kolkata']  |
| 2024-07-23 23:35:06.310  | Angela | Happen Democrat public office whatever. | ['45.78071', '12.84052', 'Portogruaro', 'IT', 'Europe/Rome']      |
| 2024-07-23 23:35:06.410  | Donna  | Travel none coach crime within lawyer.  | ['28.15112', '-82.46148', 'Lutz', 'US', 'America/New_York']       |
| …                        | …      | …                                       | …                                                                 |
\n```\n:::\n:::\n\n\nAnd now we have a \"realistic\" dataset of fake reviews matching our desired\nschema. You can adjust this to match the schema and expected distributions of\nyour own data and scale it up as needed.\n\n### GenAI/LLMs\n\nThe names and locations from Faker are bland and unrealistic. The comments are\nnonsensical. ~~And most importantly, we haven't filled our quota for blogs\nmentioning AI.~~ You could [use language models in Ibis UDFs to generate more\nrealistic synthetic data](../lms-for-data/index.qmd). We could use \"open source\"\nlanguage models to do this locally for free, an exercise left to the reader.\n\n## Next steps\n\nIf you've followed along, you have a `synthetic.ddb` file with a couple tables:\n\n::: {#80c2fa0f .cell execution_count=10}\n``` {.python .cell-code}\ncon.list_tables()\n```\n\n::: {.cell-output .cell-output-display execution_count=9}\n```\n['faked', 'source']\n```\n:::\n:::\n\n\nWe can estimate the size of data generated:\n\n::: {#eef14b3e .cell execution_count=11}\n``` {.python .cell-code}\nimport os\n\nsize_in_mbs = os.path.getsize(\"synthetic.ddb\") / (1024 * 1024)\nprint(f\"synthetic.ddb: {size_in_mbs:.2f} MBs\")\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nsynthetic.ddb: 54.51 MBs\n```\n:::\n:::\n\n\nYou can build from here to generate realistic synthetic data at any scale for\nany use case.\n\n", + "supporting": [ + "index_files/figure-html" + ], + "filters": [], + "includes": { + "include-in-header": [ + "\n\n\n\n\n" + ] + } + } +} \ No newline at end of file diff --git a/docs/posts/walking-talking-cube/index.qmd b/docs/posts/walking-talking-cube/index.qmd new file mode 100644 index 000000000000..7bb7dc1e22af --- /dev/null +++ b/docs/posts/walking-talking-cube/index.qmd @@ -0,0 +1,310 @@ +--- +title: "Taking a random cube for a walk and making it talk" +author: "Cody Peterson" +date: "2024-09-26" +image: thumbnail.png +categories: + - blog + - duckdb + - udfs +--- + +***Synthetic data with Ibis, DuckDB, Python UDFs, and Faker.*** + +To follow along, install the required libraries: + +```bash +pip install 'ibis-framework[duckdb]' faker plotly +``` + +## A random cube + +We'll generate a random cube of data with Ibis (default DuckDB backend) and +visualize it as a 3D line plot: + +```{python} +#| code-fold: true +#| code-summary: "Show me the code!" +import ibis # <1> +import ibis.selectors as s +import plotly.express as px # <1> + +ibis.options.interactive = True # <2> +ibis.options.repr.interactive.max_rows = 5 # <2> + +con = ibis.connect("duckdb://synthetic.ddb") # <3> + +if "source" in con.list_tables(): + t = con.table("source") # <4> +else: + lookback = ibis.interval(days=1) # <5> + step = ibis.interval(seconds=1) # <5> + + t = ( + ( + ibis.range( # <6> + ibis.now() - lookback, + ibis.now(), + step=step, + ) # <6> + .unnest() # <7> + .name("timestamp") # <8> + .as_table() # <9> + ) + .mutate( + index=(ibis.row_number().over(order_by="timestamp")), # <10> + **{col: 2 * (ibis.random() - 0.5) for col in ["a", "b", "c"]}, # <11> + ) + .mutate(color=ibis._["index"].histogram(nbins=8)) # <12> + .drop("index") # <13> + .relocate("timestamp", "color") # <14> + .order_by("timestamp") # <15> + ) + + t = con.create_table("source", t.to_pyarrow()) # <16> + +c = px.line_3d( # <17> + t, + x="a", + y="b", + z="c", + color="color", + hover_data=["timestamp"], +) # <17> +c +``` + +1. Import the necessary libraries. +2. Enable interactive mode for Ibis. +3. Connect to an on-disk DuckDB database. +4. Load the table if it already exists. +5. 
Define the time range and step for the data. +6. Create the array of timestamps. +7. Unnest the array to a column. +8. Name the column "timestamp". +9. Convert the column into a table. +10. Create a monotonically increasing index column. +11. Create three columns of random numbers. +12. Create a color column based on the index (help visualize the time series). +13. Drop the index column. +14. Rearrange the columns. +15. Order the table by timestamp. +16. Store the table in the on-disk database. +17. Create a 3D line plot of the data. + +## Walking + +We have a random cube of data: + +```{python} +t +``` + +But we need to make it [walk](https://en.wikipedia.org/wiki/Random_walk). We'll +use a window function to calculate the cumulative sum of each column: + +::: {.panel-tabset} + +## Without column selectors + +```{python} +window = ibis.window(order_by="timestamp", preceding=None, following=0) +walked = t.select( + "timestamp", + "color", + a=t["a"].sum().over(window), + b=t["b"].sum().over(window), + c=t["c"].sum().over(window), +).order_by("timestamp") +walked +``` + +## With column selectors + +```{python} +window = ibis.window(order_by="timestamp", preceding=None, following=0) +walked = t.select( + "timestamp", + "color", + s.across( + s.c("a", "b", "c"), # <1> + ibis._.sum().over(window), # <2> + ), +).order_by("timestamp") +walked +``` + +1. Alternatively, you can use `s.of_type(float)` to select all float columns. +2. Use the `ibis._` selector to reference a deferred column expression. + +::: + +While the first few rows may look similar to the cube, the 3D line plot does +not: + +```{python} +#| code-fold: true +#| code-summary: "Show me the code!" +c = px.line_3d( + walked, + x="a", + y="b", + z="c", + color="color", + hover_data=["timestamp"], +) +c +``` + +## Talking + +We've made our random cube and we've made it walk, but now we want to make it +talk. At this point, you might be questioning the utility of this blog post -- +what are we doing and why? The purpose is to demonstrate generating synthetic +data that can look realistic. We achieve this by building in randomness (e.g. a +random walk can be used to simulate stock prices) and also by using that +randomness to inform the generation of non-numeric synthetic data (e.g. the +ticker symbol of a stock). + +### Faking it + +Let's demonstrate this concept by pretending we have an application where users +can review a location they're at. The user's name, comment, location, and device +info are stored in our database for their review at a given timestamp. + +[Faker](https://github.com/joke2k/faker) is a commonly used Python library for +generating fake data. We'll use it to generate fake names, comments, locations, +and device info for our reviews: + +```{python} +from faker import Faker + +fake = Faker() + +res = ( + fake.name(), + fake.sentence(), + fake.location_on_land(), + fake.user_agent(), + fake.ipv4(), +) +res +``` + +We can use our random numbers to influence the fake data generation in a Python +UDF: + +```{python} +#| echo: false +#| code-fold: true +con.raw_sql("set enable_progress_bar = false;"); +``` + +```{python} +# | code-fold: true +# | code-summary: "Show me the code!" 
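+# `record_schema` describes one fake review as a struct; `faked_batch` is a
+# scalar Python UDF returning an array of those structs, so each source row
+# fans out into `batch_size` fake reviews that are then unnested and unpacked
+# into the flat `faked` table below.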
+import ibis.expr.datatypes as dt + +from datetime import datetime, timedelta + +ibis.options.repr.interactive.max_length = 5 + +record_schema = dt.Struct( + { + "timestamp": datetime, + "name": str, + "comment": str, + "location": list[str], + "device": dt.Struct( + { + "browser": str, + "ip": str, + } + ), + } +) + + +@ibis.udf.scalar.python +def faked_batch( + timestamp: datetime, + a: float, + b: float, + c: float, + batch_size: int = 8, +) -> dt.Array(record_schema): + """ + Generate records of fake data. + """ + value = (a + b + c) / 3 + + res = [ + { + "timestamp": timestamp + timedelta(seconds=0.1 * i), + "name": fake.name() if value >= 0.5 else fake.first_name(), + "comment": fake.sentence(), + "location": fake.location_on_land(), + "device": { + "browser": fake.user_agent(), + "ip": fake.ipv4() if value >= 0 else fake.ipv6(), + }, + } + for i in range(batch_size) + ] + + return res + + +if "faked" in con.list_tables(): + faked = con.table("faked") +else: + faked = ( + t.mutate( + faked=faked_batch(t["timestamp"], t["a"], t["b"], t["c"]), + ) + .select( + "a", + "b", + "c", + ibis._["faked"].unnest(), + ) + .unpack("faked") + .drop("a", "b", "c") + ) + + faked = con.create_table("faked", faked) + +faked +``` + +And now we have a "realistic" dataset of fake reviews matching our desired +schema. You can adjust this to match the schema and expected distributions of +your own data and scale it up as needed. + +### GenAI/LLMs + +The names and locations from Faker are bland and unrealistic. The comments are +nonsensical. ~~And most importantly, we haven't filled our quota for blogs +mentioning AI.~~ You could [use language models in Ibis UDFs to generate more +realistic synthetic data](../lms-for-data/index.qmd). We could use "open source" +language models to do this locally for free, an exercise left to the reader. + +## Next steps + +If you've followed along, you have a `synthetic.ddb` file with a couple tables: + +```{python} +con.list_tables() +``` + +We can estimate the size of data generated: + +```{python} +import os + +size_in_mbs = os.path.getsize("synthetic.ddb") / (1024 * 1024) +print(f"synthetic.ddb: {size_in_mbs:.2f} MBs") +``` + +You can build from here to generate realistic synthetic data at any scale for +any use case. diff --git a/docs/posts/walking-talking-cube/thumbnail.png b/docs/posts/walking-talking-cube/thumbnail.png new file mode 100644 index 000000000000..41175a12d53f Binary files /dev/null and b/docs/posts/walking-talking-cube/thumbnail.png differ
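
A minimal follow-up sketch for the post's "Next steps" section (not part of the files in the patch): reconnecting to the generated `synthetic.ddb` and exporting the fake reviews to Parquet. The database path and table names come from the post; `Table.to_parquet` is assumed to be available in the installed Ibis version.

```python
import ibis

# Reconnect to the on-disk DuckDB database the post creates.
con = ibis.connect("duckdb://synthetic.ddb")

# The post creates two tables: "source" (the random walk inputs)
# and "faked" (the synthetic reviews).
print(con.list_tables())

# Export the synthetic reviews to a Parquet file for use elsewhere.
faked = con.table("faked")
faked.to_parquet("faked_reviews.parquet")
```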