TCatalogObject {
  01: type (i32) = 3,
  02: catalog_version (i64) = 5994,
  05: table (struct) = TTable {
    01: db_name (string) = "default",
    02: tbl_name (string) = "orders",
    04: access_level (i32) = 1,
    05: columns (list) = list<struct>[21] {
      [0] = TColumn {
        01: columnName (string) = "rowid",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 5,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = 4,
          02: max_size (i64) = 4,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 0,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 1,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [1] = TColumn {
        01: columnName (string) = "orderid",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 1,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 2,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [2] = TColumn {
        01: columnName (string) = "orderdate",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 9,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = 4,
          02: max_size (i64) = 4,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 2,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 3,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [3] = TColumn {
        01: columnName (string) = "shipdate",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 9,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = 4,
          02: max_size (i64) = 4,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 3,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 4,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [4] = TColumn {
        01: columnName (string) = "shipmode",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 4,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 5,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [5] = TColumn {
        01: columnName (string) = "customerid",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 5,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 6,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [6] = TColumn {
        01: columnName (string) = "customername",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 6,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 7,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [7] = TColumn {
        01: columnName (string) = "segment",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 7,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 8,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [8] = TColumn {
        01: columnName (string) = "country",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 8,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 9,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [9] = TColumn {
        01: columnName (string) = "city",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 9,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 10,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [10] = TColumn {
        01: columnName (string) = "state",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 10,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 11,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [11] = TColumn {
        01: columnName (string) = "postalcode",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 11,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 12,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [12] = TColumn {
        01: columnName (string) = "region",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 12,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 13,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [13] = TColumn {
        01: columnName (string) = "productid",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 13,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 14,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [14] = TColumn {
        01: columnName (string) = "category",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 14,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 15,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [15] = TColumn {
        01: columnName (string) = "subcategory",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 15,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 16,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [16] = TColumn {
        01: columnName (string) = "productname",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 16,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 17,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [17] = TColumn {
        01: columnName (string) = "sales",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 14,
                03: precision (i32) = 25,
                04: scale (i32) = 10,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = 16,
          02: max_size (i64) = 16,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 17,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 18,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [18] = TColumn {
        01: columnName (string) = "quantity",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 5,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = 4,
          02: max_size (i64) = 4,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 18,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 19,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [19] = TColumn {
        01: columnName (string) = "discount",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 14,
                03: precision (i32) = 25,
                04: scale (i32) = 10,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = 16,
          02: max_size (i64) = 16,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 19,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 20,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
      [20] = TColumn {
        01: columnName (string) = "profit",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 14,
                03: precision (i32) = 25,
                04: scale (i32) = 10,
              },
            },
          },
        },
        03: comment (string) = "Inferred from Parquet file.",
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = 16,
          02: max_size (i64) = 16,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 20,
        06: virtual_column_type (i32) = 0,
        14: is_nullable (bool) = true,
        20: is_iceberg_column (bool) = true,
        21: iceberg_field_id (i32) = 21,
        22: iceberg_field_map_key_id (i32) = -1,
        23: iceberg_field_map_value_id (i32) = -1,
      },
    },
    06: clustering_columns (list) = list<struct>[0] {
    },
    07: virtual_columns (list) = list<struct>[5] {
      [0] = TColumn {
        01: columnName (string) = "input__file__name",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 12,
              },
            },
          },
        },
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 0,
        06: virtual_column_type (i32) = 1,
      },
      [1] = TColumn {
        01: columnName (string) = "file__position",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 6,
              },
            },
          },
        },
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = 8,
          02: max_size (i64) = 8,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 0,
        06: virtual_column_type (i32) = 2,
      },
      [2] = TColumn {
        01: columnName (string) = "partition__spec__id",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 5,
              },
            },
          },
        },
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = 4,
          02: max_size (i64) = 4,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 0,
        06: virtual_column_type (i32) = 3,
      },
      [3] = TColumn {
        01: columnName (string) = "iceberg__partition__serialized",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 13,
              },
            },
          },
        },
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = -1,
          02: max_size (i64) = -1,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 0,
        06: virtual_column_type (i32) = 4,
      },
      [4] = TColumn {
        01: columnName (string) = "iceberg__data__sequence__number",
        02: columnType (struct) = TColumnType {
          01: types (list) = list<struct>[1] {
            [0] = TTypeNode {
              01: type (i32) = 0,
              02: scalar_type (struct) = TScalarType {
                01: type (i32) = 6,
              },
            },
          },
        },
        04: col_stats (struct) = TColumnStats {
          01: avg_size (double) = 8,
          02: max_size (i64) = 8,
          03: num_distinct_values (i64) = -1,
          04: num_nulls (i64) = -1,
          05: num_trues (i64) = -1,
          06: num_falses (i64) = -1,
        },
        05: position (i32) = 0,
        06: virtual_column_type (i32) = 5,
      },
    },
    08: table_stats (struct) = TTableStats {
      01: num_rows (i64) = 30009,
      02: total_file_bytes (i64) = 1194216,
    },
    09: table_type (i32) = 5,
    10: hdfs_table (struct) = THdfsTable {
      01: hdfsBaseDir (string) = "s3a://novas3devcontainer/user/hive/warehouse/orders",
      02: colNames (list) = list<string>[21] {
        [0] = "rowid",
        [1] = "orderid",
        [2] = "orderdate",
        [3] = "shipdate",
        [4] = "shipmode",
        [5] = "customerid",
        [6] = "customername",
        [7] = "segment",
        [8] = "country",
        [9] = "city",
        [10] = "state",
        [11] = "postalcode",
        [12] = "region",
        [13] = "productid",
        [14] = "category",
        [15] = "subcategory",
        [16] = "productname",
        [17] = "sales",
        [18] = "quantity",
        [19] = "discount",
        [20] = "profit",
      },
      03: nullPartitionKeyValue (string) = "__HIVE_DEFAULT_PARTITION__",
      04: partitions (map) = map<i64,struct>[1] {
        4 -> THdfsPartition {
          07: partitionKeyExprs (list) = list<struct>[0] {
          },
          09: file_desc (list) = list<struct>[9] {
            [0] = THdfsFileDesc {
              01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x18\x00\x14\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x13\x99\x05\x00\x00\x00\x00\x00\x04\x00\x00\x00=\x00\x00\x00data/664716f0f1fa1b63-718da08d00000001_1212524512_data.0.parq\x00\x00\x00",
            },
            [1] = THdfsFileDesc {
              01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x1c\x00\x18\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x004!\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00;\x00\x00\x00data/b141407e8d490495-e20fab9b00000000_12528160_data.0.parq\x00",
            },
            [2] = THdfsFileDesc {
              01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x18\x00\x14\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xa8\x98\x05\x00\x00\x00\x00\x00\x04\x00\x00\x00<\x00\x00\x00data/6b4738a51f296f53-476ff4d500000001_473329991_data.0.parq\x00\x00\x00\x00",
            },
            [3] = THdfsFileDesc {
              01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x18\x00\x14\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x04\x99\x05\x00\x00\x00\x00\x00\x04\x00\x00\x00<\x00\x00\x00data/6d434e993480c058-e6c16eba00000000_330398265_data.0.parq\x00\x00\x00\x00",
            },
            [4] = THdfsFileDesc {
              01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x18\x00\x14\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xbc\x1d\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00=\x00\x00\x00data/d240b8c55eab283b-cfadfe3000000001_1266820881_data.0.parq\x00\x00\x00",
            },
            [5] = THdfsFileDesc {
              01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x1c\x00\x18\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00'\x92\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00B\x00\x00\x00data/delete-6b4738a51f296f53-476ff4d500000001_34379510_data.0.parq\x00\x00",
            },
            [6] = THdfsFileDesc {
              01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x18\x00\x14\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x87\x05\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00D\x00\x00\x00data/delete-b141407e8d490495-e20fab9b00000000_1219126561_data.0.parq\x00\x00\x00\x00",
            },
            [7] = THdfsFileDesc {
              01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x18\x00\x14\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00'\x92\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00D\x00\x00\x00data/delete-664716f0f1fa1b63-718da08d00000001_1672365893_data.0.parq\x00\x00\x00\x00",
            },
            [8] = THdfsFileDesc {
              01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x18\x00\x14\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00d\x05\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00D\x00\x00\x00data/delete-d240b8c55eab283b-cfadfe3000000001_1857115515_data.0.parq\x00\x00\x00\x00",
            },
          },
          10: location (struct) = THdfsPartitionLocation {
            01: prefix_index (i32) = 0,
            02: suffix (string) = "",
          },
          11: access_level (i32) = 1,
          12: stats (struct) = TTableStats {
            01: num_rows (i64) = 30009,
          },
          13: is_marked_cached (bool) = false,
          14: id (i64) = 4,
          15: hms_parameters (map) = map<string,string>[0] {
          },
          16: num_blocks (i64) = 0,
          17: total_file_size_bytes (i64) = 1194216,
          19: has_incremental_stats (bool) = false,
          25: partition_name (string) = "",
          26: prev_id (i64) = -1,
          27: hdfs_storage_descriptor (struct) = THdfsStorageDescriptor {
            01: lineDelim (byte) = 0x0a,
            02: fieldDelim (byte) = 0x01,
            03: collectionDelim (byte) = 0x01,
            04: mapKeyDelim (byte) = 0x01,
            05: escapeChar (byte) = 0x00,
            06: quoteChar (byte) = 0x01,
            07: fileFormat (i32) = 4,
            08: blockSize (i32) = 0,
          },
        },
      },
      05: nullColumnValue (string) = "\\N",
      07: network_addresses (list) = list<struct>[0] {
      },
      09: partition_prefixes (list) = list<string>[1] {
        [0] = "s3a://novas3devcontainer/user/hive/warehouse/orders",
      },
      10: prototype_partition (struct) = THdfsPartition {
        07: partitionKeyExprs (list) = list<struct>[0] {
        },
        14: id (i64) = -1,
        26: prev_id (i64) = -1,
        27: hdfs_storage_descriptor (struct) = THdfsStorageDescriptor {
          01: lineDelim (byte) = 0x0a,
          02: fieldDelim (byte) = 0x01,
          03: collectionDelim (byte) = 0x01,
          04: mapKeyDelim (byte) = 0x01,
          05: escapeChar (byte) = 0x00,
          06: quoteChar (byte) = 0x01,
          07: fileFormat (i32) = 8,
          08: blockSize (i32) = 0,
        },
      },
      11: sql_constraints (struct) = TSqlConstraints {
        01: primary_keys (list) = list<struct>[0] {
        },
        02: foreign_keys (list) = list<struct>[0] {
        },
      },
      12: is_full_acid (bool) = false,
      14: has_full_partitions (bool) = true,
    },
    12: metastore_table (struct) = Table {
      01: tableName (string) = "orders",
      02: dbName (string) = "default",
      03: owner (string) = "psantos",
      04: createTime (i32) = 1756200607,
      05: lastAccessTime (i32) = -441016,
      06: retention (i32) = 2147483647,
      07: sd (struct) = StorageDescriptor {
        01: cols (list) = list<struct>[21] {
          [0] = FieldSchema {
            01: name (string) = "rowid",
            02: type (string) = "int",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [1] = FieldSchema {
            01: name (string) = "orderid",
            02: type (string) = "string",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [2] = FieldSchema {
            01: name (string) = "orderdate",
            02: type (string) = "date",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [3] = FieldSchema {
            01: name (string) = "shipdate",
            02: type (string) = "date",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [4] = FieldSchema {
            01: name (string) = "shipmode",
            02: type (string) = "string",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [5] = FieldSchema {
            01: name (string) = "customerid",
            02: type (string) = "string",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [6] = FieldSchema {
            01: name (string) = "customername",
            02: type (string) = "string",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [7] = FieldSchema {
            01: name (string) = "segment",
            02: type (string) = "string",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [8] = FieldSchema {
            01: name (string) = "country",
            02: type (string) = "string",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [9] = FieldSchema {
            01: name (string) = "city",
            02: type (string) = "string",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [10] = FieldSchema {
            01: name (string) = "state",
            02: type (string) = "string",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [11] = FieldSchema {
            01: name (string) = "postalcode",
            02: type (string) = "string",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [12] = FieldSchema {
            01: name (string) = "region",
            02: type (string) = "string",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [13] = FieldSchema {
            01: name (string) = "productid",
            02: type (string) = "string",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [14] = FieldSchema {
            01: name (string) = "category",
            02: type (string) = "string",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [15] = FieldSchema {
            01: name (string) = "subcategory",
            02: type (string) = "string",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [16] = FieldSchema {
            01: name (string) = "productname",
            02: type (string) = "string",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [17] = FieldSchema {
            01: name (string) = "sales",
            02: type (string) = "decimal(25,10)",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [18] = FieldSchema {
            01: name (string) = "quantity",
            02: type (string) = "int",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [19] = FieldSchema {
            01: name (string) = "discount",
            02: type (string) = "decimal(25,10)",
            03: comment (string) = "Inferred from Parquet file.",
          },
          [20] = FieldSchema {
            01: name (string) = "profit",
            02: type (string) = "decimal(25,10)",
            03: comment (string) = "Inferred from Parquet file.",
          },
        },
        02: location (string) = "s3a://novas3devcontainer/user/hive/warehouse/orders",
        03: inputFormat (string) = "org.apache.iceberg.mr.hive.HiveIcebergInputFormat",
        04: outputFormat (string) = "org.apache.iceberg.mr.hive.HiveIcebergOutputFormat",
        05: compressed (bool) = false,
        06: numBuckets (i32) = 0,
        07: serdeInfo (struct) = SerDeInfo {
          01: name (string) = "",
          02: serializationLib (string) = "org.apache.iceberg.mr.hive.HiveIcebergSerDe",
          03: parameters (map) = map<string,string>[0] {
          },
        },
        08: bucketCols (list) = list<string>[0] {
        },
        09: sortCols (list) = list<struct>[0] {
        },
        10: parameters (map) = map<string,string>[0] {
        },
        11: skewedInfo (struct) = SkewedInfo {
          01: skewedColNames (list) = list<string>[0] {
          },
          02: skewedColValues (list) = list<list>[0] {
          },
          03: skewedColValueLocationMaps (map) = map<list,string>[0] {
          },
        },
        12: storedAsSubDirectories (bool) = false,
      },
      08: partitionKeys (list) = list<struct>[0] {
      },
      09: parameters (map) = map<string,string>[26] {
        "EXTERNAL" -> "TRUE",
        "OBJCAPABILITIES" -> "EXTREAD,EXTWRITE",
        "accessType" -> "8",
        "current-schema" -> "{\"type\":\"struct\"[...](2083)",
        "current-snapshot-id" -> "8925916327827828503",
        "current-snapshot-summary" -> "{\"added-data-fil[...](359)",
        "current-snapshot-timestamp-ms" -> "1756201845109",
        "engine.hive.enabled" -> "true",
        "external.table.purge" -> "TRUE",
        "impala.events.catalogServiceId" -> "a7019bacd71d4819:a17d009675d81858",
        "impala.events.catalogVersion" -> "110",
        "metadata_location" -> "s3a://novas3devcontainer/user/hive/warehouse/orders/metadata/00005-8bc224c4-3ebb-4904-b94a-1a8581c5e273.metadata.json",
        "numFiles" -> "5",
        "numRows" -> "30009",
        "previous_metadata_location" -> "s3a://novas3devcontainer/user/hive/warehouse/orders/metadata/00004-3de96d0c-b7b7-458f-a20a-2fbcb0d235ed.metadata.json",
        "snapshot-count" -> "5",
        "storage_handler" -> "org.apache.iceberg.mr.hive.HiveIcebergStorageHandler",
        "table_type" -> "ICEBERG",
        "totalSize" -> "1194216",
        "transient_lastDdlTime" -> "1756200607",
        "uuid" -> "9a9e1722-9b54-491d-9063-48e826a43c49",
        "write.delete.mode" -> "merge-on-read",
        "write.format.default" -> "parquet",
        "write.merge.mode" -> "merge-on-read",
        "write.parquet.compression-codec" -> "zstd",
        "write.update.mode" -> "merge-on-read",
      },
      10: viewOriginalText (string) = "",
      11: viewExpandedText (string) = "",
      12: tableType (string) = "EXTERNAL_TABLE",
      14: temporary (bool) = false,
      15: rewriteEnabled (bool) = false,
      17: catName (string) = "hive",
      18: ownerType (i32) = 1,
    },
    16: storage_metadata_load_time_ns (i64) = 582280592,
    17: iceberg_table (struct) = TIcebergTable {
      01: table_location (string) = "s3a://novas3devcontainer/user/hive/warehouse/orders",
      02: partition_spec (list) = list<struct>[1] {
        [0] = TIcebergPartitionSpec {
          01: spec_id (i32) = 0,
        },
      },
      03: default_partition_spec_id (i32) = 0,
      04: content_files (struct) = TIcebergContentFileStore {
        01: path_hash_to_data_file_without_deletes (map) = map<string,struct>[1] {
          "ba063ffb541532586b977f0bd1f6187a" -> THdfsFileDesc {
            01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x18\x00\x14\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x13\x99\x05\x00\x00\x00\x00\x00\x04\x00\x00\x00=\x00\x00\x00data/664716f0f1fa1b63-718da08d00000001_1212524512_data.0.parq\x00\x00\x00",
            02: file_metadata (string) = "\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\n\x00\x04\x00\x06\x00\x00\x00\x10\x00\x00\x00\x00\x00\n\x00\x14\x00\x00\x00\f\x00\x04\x00\n\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\n'\x00\x00\x00\x00\x00\x00",
          },
        },
        02: path_hash_to_data_file_with_deletes (map) = map<string,struct>[4] {
          "9b79f935c75a5e97f28ec8666ef89485" -> THdfsFileDesc {
            01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x18\x00\x14\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xa8\x98\x05\x00\x00\x00\x00\x00\x04\x00\x00\x00<\x00\x00\x00data/6b4738a51f296f53-476ff4d500000001_473329991_data.0.parq\x00\x00\x00\x00",
            02: file_metadata (string) = "\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\n\x00\x04\x00\x06\x00\x00\x00\x10\x00\x00\x00\x00\x00\n\x00\x14\x00\x00\x00\f\x00\x04\x00\n\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\n'\x00\x00\x00\x00\x00\x00",
          },
          "9cb2f3ace33e1deb6f90bb406fa1255d" -> THdfsFileDesc {
            01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x18\x00\x14\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x04\x99\x05\x00\x00\x00\x00\x00\x04\x00\x00\x00<\x00\x00\x00data/6d434e993480c058-e6c16eba00000000_330398265_data.0.parq\x00\x00\x00\x00",
            02: file_metadata (string) = "\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\n\x00\x04\x00\x06\x00\x00\x00\x10\x00\x00\x00\x00\x00\n\x00\x14\x00\x00\x00\f\x00\x04\x00\n\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\n'\x00\x00\x00\x00\x00\x00",
          },
          "a607e1a24ff61669c760d349bc05ccff" -> THdfsFileDesc {
            01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x1c\x00\x18\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x004!\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00;\x00\x00\x00data/b141407e8d490495-e20fab9b00000000_12528160_data.0.parq\x00",
            02: file_metadata (string) = "\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\n\x00\x04\x00\x06\x00\x00\x00\x10\x00\x00\x00\x00\x00\n\x00\x14\x00\x00\x00\f\x00\x04\x00\n\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00",
          },
          "cb01b71a931a7368978eac3e6ac1edfb" -> THdfsFileDesc {
            01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x18\x00\x14\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xbc\x1d\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00=\x00\x00\x00data/d240b8c55eab283b-cfadfe3000000001_1266820881_data.0.parq\x00\x00\x00",
            02: file_metadata (string) = "\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\n\x00\x04\x00\x06\x00\x00\x00\x10\x00\x00\x00\x00\x00\n\x00\x14\x00\x00\x00\f\x00\x04\x00\n\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\t\x00\x00\x00\x00\x00\x00\x00",
          },
        },
        03: path_hash_to_position_delete_file (map) = map<string,struct>[4] {
          "a52d37663622898c388a057b537b4eae" -> THdfsFileDesc {
            01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x1c\x00\x18\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00'\x92\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00B\x00\x00\x00data/delete-6b4738a51f296f53-476ff4d500000001_34379510_data.0.parq\x00\x00",
            02: file_metadata (string) = "\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\n\x00\x04\x00\x06\x00\x00\x00\x10\x00\x00\x00\x00\x00\n\x00\x14\x00\x00\x00\f\x00\x04\x00\n\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\n'\x00\x00\x00\x00\x00\x00",
          },
          "aedd36e0e28762ec0ba22b6f85ed0456" -> THdfsFileDesc {
            01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x18\x00\x14\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00d\x05\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00D\x00\x00\x00data/delete-d240b8c55eab283b-cfadfe3000000001_1857115515_data.0.parq\x00\x00\x00\x00",
            02: file_metadata (string) = "\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\n\x00\x04\x00\x06\x00\x00\x00\x10\x00\x00\x00\x00\x00\n\x00\x14\x00\x00\x00\f\x00\x04\x00\n\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\t\x00\x00\x00\x00\x00\x00\x00",
          },
          "b473e76d08abe4f9ea41fcd024f9d156" -> THdfsFileDesc {
            01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x18\x00\x14\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x87\x05\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00D\x00\x00\x00data/delete-b141407e8d490495-e20fab9b00000000_1219126561_data.0.parq\x00\x00\x00\x00",
            02: file_metadata (string) = "\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\n\x00\x04\x00\x06\x00\x00\x00\x10\x00\x00\x00\x00\x00\n\x00\x14\x00\x00\x00\f\x00\x04\x00\n\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00",
          },
          "bf45cccb8775d911f0928be55e484467" -> THdfsFileDesc {
            01: file_desc_data (string) = "\x14\x00\x00\x00\x00\x00\x00\x00\f\x00\x18\x00\x14\x00\f\x00\x00\x00\x04\x00\f\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00'\x92\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00D\x00\x00\x00data/delete-664716f0f1fa1b63-718da08d00000001_1672365893_data.0.parq\x00\x00\x00\x00",
            02: file_metadata (string) = "\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\n\x00\x04\x00\x06\x00\x00\x00\x10\x00\x00\x00\x00\x00\n\x00\x14\x00\x00\x00\f\x00\x04\x00\n\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\n'\x00\x00\x00\x00\x00\x00",
          },
        },
        04: path_hash_to_equality_delete_file (map) = map<string,struct>[0] {
        },
        05: has_avro (bool) = false,
        06: has_orc (bool) = false,
        07: has_parquet (bool) = true,
      },
      05: catalog_snapshot_id (i64) = 8925916327827828503,
      06: parquet_compression_codec (struct) = TCompressionCodec {
        01: codec (i32) = 10,
        02: compression_level (i32) = 3,
      },
      07: parquet_row_group_size (i64) = 0,
      08: parquet_plain_page_size (i64) = 0,
      09: parquet_dict_page_size (i64) = 0,
      10: partition_stats (map) = map<string,struct>[1] {
        "{}" -> TIcebergPartitionStats {
          01: num_files (i64) = 9,
          02: num_rows (i64) = 30009,
          03: file_size_in_bytes (i64) = 1194216,
        },
      },
    },
  },
}
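
For reference, the small statistics structures that repeat throughout the dump can be reconstructed from the field IDs and types visible above. The sketch below is inferred from this output alone: field names, numbers, and types are copied from the dump, while everything else (requiredness, defaults, any fields not printed) is an assumption rather than the authoritative Impala IDL.

// Reconstructed from the dump above; not copied from Impala's CatalogObjects.thrift.
struct TColumnStats {
  1: double avg_size              // -1 in this dump wherever the statistic is unavailable
  2: i64 max_size                 // fixed-width types show their byte width (4, 8, 16)
  3: i64 num_distinct_values      // -1 here, i.e. COMPUTE STATS has not populated NDVs
  4: i64 num_nulls
  5: i64 num_trues                // only meaningful for BOOLEAN columns
  6: i64 num_falses
}

struct TTableStats {
  1: i64 num_rows                 // 30009 for this table
  2: i64 total_file_bytes         // 1194216 for this table
}

struct TIcebergPartitionStats {
  1: i64 num_files                // 9 content files for the single (unpartitioned) spec
  2: i64 num_rows
  3: i64 file_size_in_bytes
}

The per-column TColumnStats entries above all carry -1 for distinct values and null counts, which is consistent with a table whose schema was inferred from Parquet files but on which no column statistics have been computed.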