Stuck on same bug, exhausting tokens

The file is 900 lines of code. The LLM is stuck on the same error and keeps rewriting the whole file.

It's hard to say what's going on here, but if it's churning on the same file, that's a failure mode shared by a couple of different LLMs. You can roll back to the last working state (find the last good commit with git log, then git reset --hard to it), or you can fix up the file yourself if it's something relatively minor. Chances are a heredoc got mangled or some other nested-delimiter issue. Have you tried fixing up the file yourself? Can you share the file?
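Concretely, the recovery looks something like this (the commit SHA and file path below are placeholders, substitute your own):

git log --oneline                      # find the last commit that still worked
git reset --hard abc1234               # discard everything after it
# or, to restore only the one mangled file and keep other work:
git checkout abc1234 -- path/to/broken_file.ex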

I fixed it manually. The error was in log_product_changes. Here's the full file:

defmodule ShopifyScraper.ShopifySync do
  @moduledoc """
  Context for syncing Shopify store data with comprehensive change tracking.
  Handles smart sync logic: insert new products, update changed products,
  soft delete missing products, sync collections, and log all changes for alerts.
  """

  import Ecto.Query
  alias ShopifyScraper.{Repo, Store, Product, ProductChange, Alert, Collection, ProductCollection}

  # Process products in batches to avoid long transactions
  @batch_size 50
  # Process collections in smaller batches
  @collection_batch_size 10
  # Timeout for individual HTTP requests (in milliseconds)
  @http_timeout 30_000

  @doc """
  Sync a store's products and optionally collections from Shopify
  Returns {:ok, sync_stats} or {:error, reason}
  """
  def sync_store(%Store{} = store) do
    try do
      # Update store sync status (short transaction)
      update_store_sync_status(store, "syncing")

      # Check if this is the first sync for this store
      is_first_sync = is_first_store_sync?(store)

      # Fetch ALL products from Shopify with pagination (no transaction)
      case fetch_all_shopify_products(store.url) do
        {:ok, shopify_products} ->
          # Broadcast initial sync progress
          broadcast_sync_progress(store.id, %{
            status: "processing",
            total_products: length(shopify_products),
            processed_products: 0,
            current_product_count: get_active_product_count(store.id),
            collection_count: get_active_collection_count(store.id),
            phase: "products"
          })

          # Get existing products for comparison - use handle as unique key
          existing_products = get_existing_products(store.id)
          existing_by_handle = Map.new(existing_products, &{&1.handle, &1})

          # Track sync statistics
          stats = %{
            total_fetched: length(shopify_products),
            new_products: 0,
            updated_products: 0,
            deleted_products: 0,
            changes_logged: 0,
            alerts_generated: 0,
            processed_products: 0,
            collections_synced: 0,
            collection_links_created: 0,
            collection_links_deleted: 0
          }

          # Process products in batches with progress updates
          stats =
            process_products_in_batches(
              store,
              shopify_products,
              existing_by_handle,
              stats,
              is_first_sync
            )

          # Handle missing products (soft delete) - compare by handle
          fetched_handles =
            shopify_products
            |> Enum.map(&(&1["handle"] || &1[:handle]))
            |> Enum.filter(&(&1 != nil))
            |> MapSet.new()

          missing_products =
            Enum.reject(existing_products, &MapSet.member?(fetched_handles, &1.handle))

          # Process deletions in batches too (only generate alerts if not first sync)
          stats = delete_products_in_batches(missing_products, stats, is_first_sync)

          # Sync collections if enabled for this store
          stats =
            if store.sync_collections do
              sync_store_collections(store, stats, is_first_sync)
            else
              stats
            end

          # Update store with final stats (short transaction)
          update_store_sync_completion(store, stats)

          # Broadcast completion
          broadcast_sync_progress(store.id, %{
            status: "completed",
            total_products: stats.total_fetched,
            processed_products: stats.total_fetched,
            current_product_count: get_active_product_count(store.id),
            collection_count: get_active_collection_count(store.id),
            collections_synced: stats.collections_synced,
            stats: stats
          })

          {:ok, stats}

        {:error, reason} ->
          update_store_sync_status(store, "error", format_error_message(reason))

          # Broadcast error
          broadcast_sync_progress(store.id, %{
            status: "error",
            error: format_error_message(reason)
          })

          {:error, reason}
      end
    rescue
      e ->
        update_store_sync_status(store, "error", Exception.message(e))

        # Broadcast error
        broadcast_sync_progress(store.id, %{
          status: "error",
          error: Exception.message(e)
        })

        {:error, e}
    end
  end

  defp sync_store_collections(store, stats, is_first_sync) do
    # Broadcast collections sync start
    broadcast_sync_progress(store.id, %{
      status: "processing",
      phase: "collections",
      current_product_count: get_active_product_count(store.id)
    })

    shop_search = ShopSearch.new(store.url)

    case fetch_all_shopify_collections(shop_search) do
      {:ok, shopify_collections} ->
        # Process collections in batches
        collection_stats =
          process_collections_in_batches(store, shopify_collections, is_first_sync)

        # Sync collection-product relationships
        relationship_stats =
          sync_collection_product_relationships(store, shopify_collections, shop_search)

        # Merge stats
        %{
          stats
          | collections_synced: collection_stats.collections_synced,
            collection_links_created: relationship_stats.links_created,
            collection_links_deleted: relationship_stats.links_deleted
        }

      {:error, reason} ->
        # Log collection sync error but don't fail the entire sync
        IO.puts("Collection sync failed for store #{store.id}: #{inspect(reason)}")
        stats
    end
  end

  defp fetch_all_shopify_collections(shop_search) do
    case ShopSearch.get_all_collections(shop_search) do
      {:ok, collections} when is_list(collections) ->
        {:ok, collections}

      {:ok, _} ->
        {:error, "Invalid response format from get_all_collections"}

      {:error, reason} ->
        {:error, reason}
    end
  end

  defp process_collections_in_batches(store, shopify_collections, is_first_sync) do
    # Get existing collections for comparison
    existing_collections = get_existing_collections(store.id)
    existing_by_handle = Map.new(existing_collections, &{&1.handle, &1})

    stats = %{collections_synced: 0}

    shopify_collections
    |> Enum.chunk_every(@collection_batch_size)
    |> Enum.reduce(stats, fn batch, acc ->
      # Process each batch in its own transaction
      batch_result =
        Repo.transaction(fn ->
          Enum.reduce(batch, acc, fn shopify_collection, batch_acc ->
            process_collection(
              store,
              shopify_collection,
              existing_by_handle,
              batch_acc,
              is_first_sync
            )
          end)
        end)

      case batch_result do
        {:ok, batch_stats} -> batch_stats
        {:error, _} -> acc
      end
    end)
  end

  defp process_collection(store, shopify_collection, existing_by_handle, stats, _is_first_sync) do
    handle = shopify_collection[:handle] || shopify_collection["handle"]
    shopify_id = to_string(shopify_collection[:id] || shopify_collection["id"])

    collection_attrs = %{
      store_id: store.id,
      shopify_id: shopify_id,
      title: shopify_collection[:title] || shopify_collection["title"] || "Untitled Collection",
      handle: handle,
      description: shopify_collection[:description] || shopify_collection["description"],
      image_url: extract_collection_image_url(shopify_collection),
      published:
        (shopify_collection[:published_at] || shopify_collection["published_at"]) != nil,
      sort_order: shopify_collection[:sort_order] || shopify_collection["sort_order"],
      created_at_shopify:
        parse_shopify_datetime(
          shopify_collection[:created_at] || shopify_collection["created_at"]
        ),
      updated_at_shopify:
        parse_shopify_datetime(
          shopify_collection[:updated_at] || shopify_collection["updated_at"]
        )
    }

    case Map.get(existing_by_handle, handle) do
      nil ->
        # New collection - insert
        case Collection.changeset(%Collection{}, collection_attrs) |> Repo.insert() do
          {:ok, _collection} ->
            %{stats | collections_synced: stats.collections_synced + 1}

          {:error, _changeset} ->
            stats
        end

      existing_collection ->
        # Existing collection - check for changes and update
        changeset = Collection.changeset(existing_collection, collection_attrs)

        if changeset.changes != %{} do
          case Repo.update(changeset) do
            {:ok, _updated_collection} ->
              %{stats | collections_synced: stats.collections_synced + 1}

            {:error, _changeset} ->
              stats
          end
        else
          stats
        end
    end
  end

  defp sync_collection_product_relationships(store, shopify_collections, shop_search) do
    # Get all existing products for this store (we'll need to match by handle)
    existing_products = get_existing_products(store.id)
    existing_products_by_handle = Map.new(existing_products, &{&1.handle, &1})

    # Get all existing collections for this store
    existing_collections = get_existing_collections(store.id)
    existing_collections_by_handle = Map.new(existing_collections, &{&1.handle, &1})

    # Clear all existing product-collection links for this store
    # We'll rebuild them from scratch to handle deletions
    clear_store_collection_links(store.id)

    stats = %{links_created: 0, links_deleted: 0}

    # Process each collection and its products
    shopify_collections
    |> Enum.reduce(stats, fn shopify_collection, acc ->
      handle = shopify_collection[:handle] || shopify_collection["handle"]

      case Map.get(existing_collections_by_handle, handle) do
        nil ->
          # Collection doesn't exist in our DB, skip
          acc

        collection ->
          # Fetch products for this collection
          case fetch_collection_products_paginated(shop_search, handle) do
            {:ok, collection_products} ->
              links_created =
                create_collection_product_links(
                  collection,
                  collection_products,
                  existing_products_by_handle
                )

              %{acc | links_created: acc.links_created + links_created}

            {:error, _reason} ->
              # Skip this collection if we can't fetch its products
              acc
          end
      end
    end)
  end

  defp fetch_collection_products_paginated(shop_search, collection_handle) do
    fetch_collection_products_recursive(shop_search, collection_handle, [], 1)
  end

  defp fetch_collection_products_recursive(
         shop_search,
         collection_handle,
         accumulated_products,
         page
       ) do
    case ShopSearch.get_collection_products_paginated(shop_search, collection_handle, %{
           page: page,
           limit: 50,
           timeout: @http_timeout
         }) do
      {:ok, products} when is_list(products) and length(products) > 0 ->
        new_accumulated = accumulated_products ++ products
        # Be nice to the server
        Process.sleep(100)

        fetch_collection_products_recursive(
          shop_search,
          collection_handle,
          new_accumulated,
          page + 1
        )

      {:ok, []} ->
        # Empty list means no more products
        {:ok, accumulated_products}

      {:ok, products} when is_list(products) ->
        # Non-empty list but might be last page
        {:ok, accumulated_products ++ products}

      {:error, reason} ->
        if page == 1 do
          # Error on first page is a real error
          {:error, reason}
        else
          # Error on subsequent pages might mean we're done
          {:ok, accumulated_products}
        end
    end
  end

  defp create_collection_product_links(
         collection,
         collection_products,
         existing_products_by_handle
       ) do
    junction_entries =
      collection_products
      |> Enum.with_index()
      |> Enum.map(fn {product, index} ->
        handle = product["handle"] || product[:handle]

        case Map.get(existing_products_by_handle, handle) do
          nil ->
            # Product doesn't exist in our DB, skip
            nil

          existing_product ->
            %{
              product_id: existing_product.id,
              collection_id: collection.id,
              position: index,
              inserted_at: NaiveDateTime.utc_now() |> NaiveDateTime.truncate(:second),
              updated_at: NaiveDateTime.utc_now() |> NaiveDateTime.truncate(:second)
            }
        end
      end)
      |> Enum.reject(&is_nil/1)

    if length(junction_entries) > 0 do
      # Bulk insert junction entries
      {count, _} = Repo.insert_all(ProductCollection, junction_entries)
      count
    else
      0
    end
  end

  defp clear_store_collection_links(store_id) do
    # Delete all product-collection links for products belonging to this store
    # Use subquery instead of JOIN for SQLite compatibility
    product_ids = from(p in Product, where: p.store_id == ^store_id, select: p.id) |> Repo.all()

    from(pc in ProductCollection, where: pc.product_id in ^product_ids)
    |> Repo.delete_all()
  end

  defp get_existing_collections(store_id) do
    from(c in Collection,
      where: c.store_id == ^store_id,
      select: c
    )
    |> Repo.all()
  end

  defp extract_collection_image_url(shopify_collection) do
    cond do
      image = shopify_collection["image"] ->
        image["src"] || image[:src]

      image = shopify_collection[:image] ->
        image["src"] || image[:src]

      true ->
        nil
    end
  end

  defp is_first_store_sync?(%Store{last_synced_at: nil}), do: true
  defp is_first_store_sync?(_store), do: false

  defp process_products_in_batches(
         store,
         shopify_products,
         existing_by_handle,
         initial_stats,
         is_first_sync
       ) do
    shopify_products
    |> Enum.chunk_every(@batch_size)
    |> Enum.with_index()
    |> Enum.reduce(initial_stats, fn {batch, batch_index}, stats ->
      # Process each batch in its own transaction
      batch_result =
        Repo.transaction(fn ->
          Enum.reduce(batch, stats, fn shopify_product, acc ->
            process_product(store, shopify_product, existing_by_handle, acc, is_first_sync)
          end)
        end)

      case batch_result do
        {:ok, batch_stats} ->
          # Update processed count
          updated_stats = %{
            batch_stats
            | processed_products: batch_stats.processed_products + length(batch)
          }

          # Broadcast progress after each batch
          current_count = get_active_product_count(store.id)

          progress_percent =
            round(updated_stats.processed_products / updated_stats.total_fetched * 100)

          broadcast_sync_progress(store.id, %{
            status: "processing",
            phase: "products",
            total_products: updated_stats.total_fetched,
            processed_products: updated_stats.processed_products,
            current_product_count: current_count,
            progress_percent: progress_percent,
            batch_number: batch_index + 1,
            total_batches: ceil(updated_stats.total_fetched / @batch_size)
          })

          updated_stats

        # Continue with previous stats if batch fails
        {:error, _} ->
          %{stats | processed_products: stats.processed_products + length(batch)}
      end
    end)
  end

  defp delete_products_in_batches(missing_products, initial_stats, is_first_sync) do
    missing_products
    |> Enum.chunk_every(@batch_size)
    |> Enum.reduce(initial_stats, fn batch, stats ->
      # Process each deletion batch in its own transaction
      Repo.transaction(fn ->
        Enum.reduce(batch, stats, fn product, acc ->
          soft_delete_product(product, acc, is_first_sync)
        end)
      end)
      |> case do
        {:ok, batch_stats} -> batch_stats
        # Continue with previous stats if batch fails
        {:error, _} -> stats
      end
    end)
  end

  defp fetch_all_shopify_products(store_url) do
    shop_search = ShopSearch.new(store_url)
    fetch_products_recursive(shop_search, [], 1)
  end

  defp fetch_products_recursive(shop_search, accumulated_products, page) do
    # Add timeout and error handling for HTTP requests
    case ShopSearch.get_products(shop_search, %{page: page, timeout: @http_timeout}) do
      {:ok, products} when is_list(products) and length(products) > 0 ->
        # Got products, accumulate them and fetch next page
        new_accumulated = accumulated_products ++ products

        # Add a small delay to be nice to the server
        Process.sleep(100)

        fetch_products_recursive(shop_search, new_accumulated, page + 1)

      {:ok, []} ->
        # Empty list means no more products, return accumulated
        {:ok, accumulated_products}

      {:ok, products} when is_list(products) ->
        # Non-empty list but might be last page
        new_accumulated = accumulated_products ++ products

        # If we got less than a full page (typically 30), we're probably done
        # But let's try one more page to be sure
        Process.sleep(100)

        case ShopSearch.get_products(shop_search, %{page: page + 1, timeout: @http_timeout}) do
          {:ok, []} ->
            {:ok, new_accumulated}

          {:ok, more_products} when is_list(more_products) ->
            # More products found, continue recursively
            fetch_products_recursive(shop_search, new_accumulated ++ more_products, page + 2)

          {:error, _} ->
            # Error on next page probably means we're done
            {:ok, new_accumulated}
        end

      {:ok, _} ->
        {:error, "Invalid response format from ShopSearch"}

      {:error, reason} ->
        if page == 1 do
          # Error on first page is a real error
          {:error, reason}
        else
          # Error on subsequent pages might mean we're done
          # Return what we've accumulated so far
          {:ok, accumulated_products}
        end
    end
  end

  defp get_existing_products(store_id) do
    from(p in Product,
      where: p.store_id == ^store_id and is_nil(p.deleted_at),
      select: p
    )
    |> Repo.all()
  end

  defp process_product(store, shopify_product, existing_by_handle, stats, is_first_sync) do
    shopify_id =
      to_string(shopify_product["id"] || shopify_product[:id] || shopify_product[:platform_id])

    handle = shopify_product["handle"] || shopify_product[:handle]

    now = DateTime.utc_now()

    product_attrs = %{
      store_id: store.id,
      title: shopify_product["title"] || shopify_product[:title] || "Untitled Product",
      shopify_id: shopify_id,
      handle: handle,
      description: shopify_product["body_html"] || shopify_product[:body_html],
      vendor: shopify_product["vendor"] || shopify_product[:vendor],
      product_type: shopify_product["product_type"] || shopify_product[:product_type],
      tags: extract_tags(shopify_product),
      created_at_shopify:
        parse_shopify_datetime(shopify_product["created_at"] || shopify_product[:created_at]),
      updated_at_shopify:
        parse_shopify_datetime(shopify_product["updated_at"] || shopify_product[:updated_at]),
      last_seen_at: now
    }

    # Get variant data for pricing (variants may be string- or atom-keyed)
    variant = List.first(shopify_product["variants"] || shopify_product[:variants] || [])

    product_attrs =
      if variant do
        Map.merge(product_attrs, %{
          price: parse_price_from_cents(variant["price"] || variant[:price]),
          compare_at_price:
            parse_price_from_cents(variant["compare_at_price"] || variant[:compare_at_price]),
          available: (variant["available"] || variant[:available]) == true,
          inventory_quantity: variant["inventory_quantity"] || variant[:inventory_quantity]
        })
      else
        product_attrs
      end

    # Get image URL (images may be string- or atom-keyed)
    image = List.first(shopify_product["images"] || shopify_product[:images] || [])

    product_attrs =
      if image do
        Map.put(product_attrs, :image_url, image["src"] || image[:src])
      else
        product_attrs
      end

    # Compare by handle (unique per store) instead of shopify_id
    case Map.get(existing_by_handle, handle) do
      nil ->
        # New product - insert (only generate alert if not first sync)
        insert_new_product(product_attrs, stats, is_first_sync)

      existing_product ->
        # Existing product - check for changes and update (always generate alerts for changes)
        update_existing_product(existing_product, product_attrs, stats)
    end
  end

  defp insert_new_product(product_attrs, stats, is_first_sync) do
    case Product.changeset(%Product{}, product_attrs) |> Repo.insert() do
      {:ok, product} ->
        # Always log creation for audit trail
        log_product_change(product, "created", nil, nil, %{})

        # Only generate alert for new products if this is NOT the first sync
        alert_count =
          if is_first_sync do
            # Don't generate alerts during initial sync
            0
          else
            # Generate new product alert for subsequent syncs
            generate_alert(
              product,
              "new_product",
              "New Product Added",
              "New product '#{product.title}' has been added to the store."
            )

            1
          end

        %{
          stats
          | new_products: stats.new_products + 1,
            changes_logged: stats.changes_logged + 1,
            alerts_generated: stats.alerts_generated + alert_count
        }

      {:error, _changeset} ->
        stats
    end
  end

  defp update_existing_product(existing_product, new_attrs, stats) do
    changeset = Product.changeset(existing_product, new_attrs)

    # Filter out last_seen_at changes as they're just maintenance updates
    filtered_changes = Map.delete(changeset.changes, :last_seen_at)

    if filtered_changes == %{} do
      # No meaningful changes, just update last_seen_at
      Product.changeset(existing_product, %{last_seen_at: new_attrs.last_seen_at})
      |> Repo.update()

      stats
    else
      # Has changes - update and log them (always generate alerts for updates)
      case Repo.update(changeset) do
        {:ok, updated_product} ->
          # Log the filtered changes so last_seen_at bumps aren't recorded as product changes
          stats = log_product_changes(existing_product, updated_product, filtered_changes, stats)
          %{stats | updated_products: stats.updated_products + 1}

        {:error, _changeset} ->
          stats
      end
    end
  end

  defp soft_delete_product(product, stats, is_first_sync) do
    now = DateTime.utc_now()

    case Product.changeset(product, %{deleted_at: now}) |> Repo.update() do
      {:ok, updated_product} ->
        # Always log deletion for audit trail
        log_product_change(updated_product, "deleted", nil, nil, %{deleted_at: now})

        # Only generate alert if NOT first sync and product was available
        alert_count =
          if is_first_sync do
            # Don't generate alerts during initial sync
            0
          else
            # Generate out of stock alert if it was available
            if product.available do
              generate_alert(
                updated_product,
                "out_of_stock",
                "Product No Longer Available",
                "Product '#{product.title}' is no longer available in the store."
              )

              1
            else
              0
            end
          end

        %{
          stats
          | deleted_products: stats.deleted_products + 1,
            changes_logged: stats.changes_logged + 1,
            alerts_generated: stats.alerts_generated + alert_count
        }

      {:error, _changeset} ->
        stats
    end
  end

  defp log_product_changes(old_product, new_product, changes, stats) do
    changes_logged =
      Enum.reduce(changes, 0, fn {field, new_value}, acc ->
        old_value = Map.get(old_product, field)

        change_attrs = %{
          product_id: new_product.id,
          change_type: determine_change_type(field, old_value, new_value),
          field_name: to_string(field),
          old_value: serialize_value(old_value),
          new_value: serialize_value(new_value)
        }

        # Add price difference for price changes. Prices come back from
        # parse_price_from_cents/1 (and from Ecto decimal fields) as Decimal
        # structs, so test for Decimal rather than with is_number/1, which
        # never matches a struct.
        change_attrs =
          if field in [:price, :compare_at_price] and is_struct(old_value, Decimal) and
               is_struct(new_value, Decimal) do
            Map.put(change_attrs, :price_difference, Decimal.sub(new_value, old_value))
          else
            change_attrs
          end

        case ProductChange.changeset(%ProductChange{}, change_attrs) |> Repo.insert() do
          {:ok, _change} ->
            # Generate alerts for significant changes (always, regardless of first sync)
            generate_change_alerts(new_product, field, old_value, new_value)
            acc + 1

          {:error, _} ->
            acc
        end
      end)

    %{
      stats
      | changes_logged: stats.changes_logged + changes_logged,
        alerts_generated: stats.alerts_generated + changes_logged
    }
  end

  defp log_product_change(product, change_type, old_value, new_value, metadata) do
    ProductChange.changeset(%ProductChange{}, %{
      product_id: product.id,
      change_type: change_type,
      old_value: serialize_value(old_value),
      new_value: serialize_value(new_value),
      metadata: metadata
    })
    |> Repo.insert()
  end

  defp determine_change_type(field, _old_value, _new_value) do
    case field do
      f when f in [:price, :compare_at_price] -> "price_change"
      :available -> "availability_change"
      _ -> "updated"
    end
  end

  defp generate_change_alerts(product, :price, old_price, new_price)
       when is_struct(old_price, Decimal) and is_struct(new_price, Decimal) do
    if Decimal.compare(new_price, old_price) == :lt do
      price_drop = Decimal.sub(old_price, new_price)

      generate_alert(
        product,
        "price_drop",
        "Price Drop Alert",
        "Price dropped by $#{price_drop} for '#{product.title}' (was $#{old_price}, now $#{new_price})"
      )
    end
  end

  defp generate_change_alerts(product, :compare_at_price, old_price, new_price)
       when is_struct(old_price, Decimal) and is_struct(new_price, Decimal) do
    if Decimal.compare(new_price, old_price) == :lt do
      price_drop = Decimal.sub(old_price, new_price)

      generate_alert(
        product,
        "price_drop",
        "Compare Price Drop Alert",
        "Compare price dropped by $#{price_drop} for '#{product.title}' (was $#{old_price}, now $#{new_price})"
      )
    end
  end

  defp generate_change_alerts(product, :available, true, false) do
    generate_alert(
      product,
      "out_of_stock",
      "Out of Stock",
      "'#{product.title}' is now out of stock."
    )
  end

  defp generate_change_alerts(product, :available, false, true) do
    generate_alert(
      product,
      "back_in_stock",
      "Back in Stock",
      "'#{product.title}' is back in stock!"
    )
  end

  defp generate_change_alerts(product, field, _old_value, _new_value)
       when field in [:title, :description] do
    generate_alert(
      product,
      "product_info_update",
      "Product Information Updated",
      "#{String.capitalize(to_string(field))} updated for '#{product.title}'"
    )
  end

  defp generate_change_alerts(_product, _field, _old_value, _new_value), do: :ok

  defp generate_alert(product, alert_type, title, message) do
    Alert.changeset(%Alert{}, %{
      product_id: product.id,
      store_id: product.store_id,
      alert_type: alert_type,
      title: title,
      message: message,
      triggered_at: DateTime.utc_now()
    })
    |> Repo.insert()
  end

  defp broadcast_sync_progress(store_id, progress_data) do
    Phoenix.PubSub.broadcast(
      ShopifyScraper.PubSub,
      "store_sync:#{store_id}",
      {:sync_progress, progress_data}
    )

    # Also broadcast to general dashboard topic for overall updates
    Phoenix.PubSub.broadcast(
      ShopifyScraper.PubSub,
      "dashboard_updates",
      {:store_sync_progress, store_id, progress_data}
    )
  end

  defp update_store_sync_status(store, status, error_message \\ nil) do
    attrs = %{sync_status: status}
    attrs = if error_message, do: Map.put(attrs, :error_message, error_message), else: attrs

    # Use a short transaction for status updates
    Repo.transaction(fn ->
      Store.changeset(store, attrs) |> Repo.update()
    end)
  end

  defp update_store_sync_completion(store, _stats) do
    # Use a short transaction for completion updates
    Repo.transaction(fn ->
      Store.changeset(store, %{
        sync_status: "completed",
        last_synced_at: DateTime.utc_now(),
        product_count: get_active_product_count(store.id),
        collection_count: get_active_collection_count(store.id),
        error_message: nil
      })
      |> Repo.update()
    end)
  end

  defp parse_shopify_datetime(nil), do: nil

  defp parse_shopify_datetime(datetime_string) when is_binary(datetime_string) do
    case DateTime.from_iso8601(datetime_string) do
      {:ok, datetime, _} -> datetime
      _ -> nil
    end
  end

  # Anything else (unexpected types) is treated as missing
  defp parse_shopify_datetime(_), do: nil

  defp parse_price_from_cents(value) when is_binary(value) do
    case Decimal.parse(value) do
      {decimal, _} -> decimal
      :error -> nil
    end
  end

  defp parse_price_from_cents(nil), do: nil

  defp parse_price_from_cents(cents) when is_integer(cents),
    do: Decimal.div(Decimal.new(cents), 100)

  # Decimal.new/1 raises on floats; go through from_float/1 for those
  defp parse_price_from_cents(cents) when is_float(cents),
    do: Decimal.div(Decimal.from_float(cents), 100)

  defp parse_price_from_cents(_), do: nil

  defp extract_tags(shopify_product) do
    case shopify_product["tags"] || shopify_product[:tags] do
      nil -> []
      tags when is_binary(tags) -> tags |> String.split(",") |> Enum.map(&String.trim/1)
      tags when is_list(tags) -> tags
      _ -> []
    end
  end

  defp serialize_value(value) when is_list(value), do: Jason.encode!(value)
  defp serialize_value(value), do: to_string(value)

  defp format_error_message({:http_error, status, body}) do
    "HTTP #{status}: #{inspect(body)}"
  end

  defp format_error_message(reason) when is_binary(reason) do
    reason
  end

  defp format_error_message(reason) do
    inspect(reason)
  end

  defp get_active_product_count(store_id) do
    from(p in Product, where: p.store_id == ^store_id and is_nil(p.deleted_at))
    |> Repo.aggregate(:count, :id)
  end

  defp get_active_collection_count(store_id) do
    from(c in Collection, where: c.store_id == ^store_id)
    |> Repo.aggregate(:count, :id)
  end
end

Also, I understand that LLMs can mess up. But can we have some kind of check? If the same error message shows up two or three times in a row, it's better to stop and get human intervention. My tokens get exhausted every time I leave the computer unattended. I asked for the same thing on Twitter. 🫣
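Something like this is what I have in mind: a minimal sketch of a repeated-error guard (all the names here are made up for illustration, this isn't an API any agent framework actually exposes):

defmodule RepeatedErrorGuard do
  use Agent

  @max_repeats 3

  def start_link(_opts \\ []) do
    Agent.start_link(fn -> %{last_error: nil, count: 0} end, name: __MODULE__)
  end

  # Record an error message. Returns :halt_for_human once the same message
  # has been seen @max_repeats times in a row, :continue otherwise.
  def record_error(message) do
    Agent.get_and_update(__MODULE__, fn
      %{last_error: ^message, count: count} when count + 1 >= @max_repeats ->
        {:halt_for_human, %{last_error: message, count: count + 1}}

      %{last_error: ^message, count: count} ->
        {:continue, %{last_error: message, count: count + 1}}

      _different_error ->
        {:continue, %{last_error: message, count: 1}}
    end)
  end

  # A successful step resets the streak.
  def record_success do
    Agent.update(__MODULE__, fn _ -> %{last_error: nil, count: 0} end)
  end
end

The tool loop would call RepeatedErrorGuard.record_error/1 after every failed compile or test run and stop burning tokens as soon as it returns :halt_for_human.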