Does Repo.stream generate queries to the DB without LIMIT+OFFSET?
Rails' find_each handles this more cleverly, fetching each batch with a keyset condition on id instead of an offset:

User Load (2.9ms) SELECT "users".* FROM "users" WHERE ("users"."id" > 14058) ORDER BY "users"."id" ASC LIMIT $1 [["LIMIT", 1000]]
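For comparison, roughly the same keyset batching can be written by hand in Ecto. This is only a sketch, not code from the talk; the User schema, the Repo module and the batch size of 1000 are assumptions:

import Ecto.Query

batch_size = 1000

Stream.unfold(0, fn last_id ->
  # Fetch the next batch with WHERE id > last_id ORDER BY id LIMIT 1000,
  # the same shape of query that find_each produces
  query =
    from u in User,
      where: u.id > ^last_id,
      order_by: u.id,
      limit: ^batch_size

  case Repo.all(query) do
    [] -> nil
    users -> {users, List.last(users).id}
  end
end)
|> Stream.flat_map(& &1)
|> Enum.each(&IO.inspect(&1.email))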
."locale", u0."referral_channel", u0."email", u0."challenge_ids", u0."avatar", u0."migration_version", u0."configuration_id", u0."inserted_at", u0."updated_at" FROM "users" AS u0
2019-11-01 13:44:13.609 +03 [63083] LOG: execute fetch from <unnamed>/G2: SELECT u0."id", u0."nickname", u0."access_key", u0."online", u0."extra", u0."timezone_offset", u0."last_visit_at", u0."locale", u0."referral_channel", u0."email", u0."challenge_ids", u0."avatar", u0."migration_version", u0."configuration_id", u0."inserted_at", u0."updated_at" FROM "users" AS u0
2019-11-01 13:44:13.632 +03 [63083] LOG: execute fetch from <unnamed>/G2: SELECT u0."id", u0."nickname", u0."access_key", u0."online", u0."extra", u0."timezone_offset", u0."last_visit_at", u0."locale", u0."referral_channel", u0."email", u0."challenge_ids", u0."avatar", u0."migration_version", u0."configuration_id", u0."inserted_at", u0."updated_at" FROM "users" AS u0
2019-11-01 13:44:13.666 +03 [63083] LOG: execute fetch from <unnamed>/G2: SELECT u0."id", u0."nickname", u0."access_key", u0."online", u0."extra", u0."timezone_offset", u0."last_visit_at", u0."locale", u0."referral_channel", u0."email", u0."challenge_ids", u0."avatar", u0."migration_version", u0."configuration_id", u0."inserted_at", u0."updated_at" FROM "users" AS u0
2019-11-01 13:44:13.700 +03 [63083] LOG: execute fetch from <unnamed>/G2: SELECT u0."id", u0."nickname", u0."access_key", u0."online", u0."extra", u0."timezone_offset", u0."last_visit_at", u0."locale", u0."referral_channel", u0."email", u0."challenge_ids", u0."avatar", u0."migration_version", u0."configuration_id", u0."inserted_at", u0."updated_at" FROM "users" AS u0
2019-11-01 13:44:13.724 +03 [63083] LOG: execute fetch from <unnamed>/G2: SELECT u0."id", u0."nickname", u0."access_key", u0."online", u0."extra", u0."timezone_offset", u0."last_visit_at", u0."locale", u0."referral_channel", u0."email", u0."challenge_ids", u0."avatar", u0."migration_version", u0."configuration_id", u0."inserted_at", u0."updated_at" FROM "users" AS u0
2019-11-01 13:44:13.753 +03 [63083] LOG: execute fetch from <unnamed>/G2: SELECT u0."id", u0."nickname", u0."access_key", u0."online", u0."extra", u0."timezone_offset", u0."last_visit_at", u0."locale", u0."referral_channel", u0."email", u0."challenge_ids", u0."avatar", u0."migration_version", u0."configuration_id", u0."inserted_at", u0."updated_at" FROM "users" AS u0
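In rough outline, Repo.stream is used like this (a sketch; Repo and User are assumed, not from the original). The stream has to be enumerated inside a transaction, and the :max_rows option (500 by default) controls how many rows each cursor fetch pulls:

import Ecto.Query

Repo.transaction(fn ->
  from(u in User, select: {u.id, u.email})
  |> Repo.stream(max_rows: 1000)   # each cursor fetch pulls up to 1000 rows
  |> Stream.each(&IO.inspect/1)
  |> Stream.run()
end)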
defmodule UserExporter do
  @columns ~w( id email inserted_at updated_at )a

  def export(query) do
    path = "/tmp/users.csv"

    Repo.transaction fn ->
      query
      |> Repo.stream
      |> Stream.map(&parse_line/1)
      |> CSV.encode
      |> Enum.into(File.stream!(path, [:write, :utf8]))
    end
  end

  defp parse_line(user) do
    # order our data to match our column order
    Enum.map(@columns, &Map.get(user, &1))
  end
end
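The Repo.transaction wrapper is there because a Repo.stream can only be enumerated inside a transaction. A hypothetical usage example (the User schema and the query are assumptions, not part of the original):

import Ecto.Query

{:ok, _file_stream} = UserExporter.export(from(u in User, order_by: u.id))
# /tmp/users.csv now contains the id, email, inserted_at and updated_at columns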
So, back to the question: does Repo.stream generate queries to the DB without LIMIT+OFFSET? Yes; instead of paginating, it streams the whole result set through a database cursor.