JSON encode Geo.Point from geo library as human readable form - ecto

I have this schema, which has a Geo.Point:
defmodule Api.Shop do
  use Ecto.Schema
  import Ecto.Changeset
  import Api.Repo
  import Ecto.Query

  @derive {Poison.Encoder, only: [:name, :place_id, :geo_json, :distance]}
  schema "shops" do
    field :name, :string
    field :place_id, :string
    field :point, Geo.Point
    field :geo_json, :string, virtual: true
    field :distance, :float, virtual: true
    timestamps()
  end

  def encode_model(shop) do
    %Api.Shop{shop | geo_json: Geo.JSON.encode(shop.point)}
  end

  defimpl Poison.Encoder, for: Api.Shop do
    def encode(shop, options) do
      shop = Api.Shop.encode_model(shop)
      Poison.Encoder.Map.encode(Map.take(shop, [:id, :name, :geo_json]), options)
    end
  end

  def changeset(shop, params \\ %{}) do
    shop
    |> cast(params, [:name, :place_id, :point])
    |> validate_required([:name, :place_id, :point])
    |> unique_constraint(:place_id)
  end
end
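For context, with the defimpl above a shop struct can be passed straight to Poison; a minimal usage sketch (the repo call and the id here are hypothetical):

shop = Api.Repo.get!(Api.Shop, 1)
Poison.encode!(shop)
# => a JSON string containing :id, :name, and the GeoJSON map in :geo_json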
And when I return the shop.point field in a query:
def create_query_no_keyword(categories, shop_ids) do
  products_shops_categories = from p in Product,
    distinct: p.id,
    join: ps in ProductShop, on: p.id == ps.p_id,
    join: s in Shop, on: s.id == ps.s_id,
    join: pc in ProductCategory, on: p.id == pc.p_id,
    join: c in Subcategory, on: c.id == pc.c_id,
    where: c.id in ^categories,
    where: s.id in ^shop_ids,
    group_by: [p.id, p.name, p.brand],
    select: %{product: p,
              categories: fragment("json_agg( DISTINCT (?, ?)) AS category", c.id, c.name),
              shops: fragment("json_agg( DISTINCT (?, ?, ?)) AS shop", s.id, s.name, s.point)}
end
What gets returned is actually 0101000020E6100000A3BDB0EB0DD9654030AC2C1BE76D42C0, which is the wrong format - WKB. I'm looking to encode it as WKT, which has readable coordinates.
How do I get s.point to be WKT format and thus have coordinates, when it is returned by the query?

I found this Stack Exchange GIS answer to be the solution:
Use this for the point object:

SELECT ST_AsText(the_geom)
FROM myTable;

and this for viewing X, Y and the geom object:

SELECT ST_X(the_geom), ST_Y(the_geom), ST_AsText(the_geom)
FROM myTable;
The Geo library uses PostGIS, and the solution is PostGIS specific: you need to select the column using ST_AsText, or ST_X and ST_Y, from PostGIS.
My select statement changed to this:
select: %{product: p,
          categories: fragment("json_agg( DISTINCT (?, ?)) AS category", c.id, c.name),
          shops: fragment("json_agg( DISTINCT (?, ?, ST_X(?), ST_Y(?))) AS shop", s.id, s.name, s.point, s.point)}
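If the WKT text itself is wanted rather than separate X/Y values, the same fragment approach should also work with ST_AsText (a sketch under the same bindings, not tested):

select: %{product: p,
          categories: fragment("json_agg( DISTINCT (?, ?)) AS category", c.id, c.name),
          shops: fragment("json_agg( DISTINCT (?, ?, ST_AsText(?))) AS shop", s.id, s.name, s.point)}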

Related

Postgres JOIN child as JSON

I have a node server accessing a postgres database through an npm package, pg, and have a working query that returns the data, but I think it may be able to be optimized. The data model is of versions and features; one version has many feature children. This query pattern works in a few contexts for my app, but it looks clumsy. Is there a cleaner way?
SELECT
  v.*,
  coalesce(
    (SELECT array_to_json(array_agg(row_to_json(x)))
     FROM (SELECT f.* FROM app_feature f WHERE f.version = v.id) x),
    '[]'
  ) AS features
FROM app_version v;
CREATE TABLE app_version(
id SERIAL PRIMARY KEY,
major INT NOT NULL,
mid INT NOT NULL,
minor INT NOT NULL,
date DATE,
description VARCHAR(256),
status VARCHAR(24)
);
CREATE TABLE app_feature(
id SERIAL PRIMARY KEY,
version INT,
description VARCHAR(256),
type VARCHAR(24),
CONSTRAINT FK_app_feature_version FOREIGN KEY(version) REFERENCES app_version(id)
);
INSERT INTO app_version (major, mid, minor, date, description, status) VALUES (0,0,0, current_timestamp, 'initial test', 'PENDING');
INSERT INTO app_feature (version, description, type) VALUES (1, 'store features', 'New Feature');
INSERT INTO app_feature (version, description, type) VALUES (1, 'return features as json', 'New Feature');
The subquery in the FROM clause may not be needed.
select v.*,
       coalesce((select array_to_json(array_agg(row_to_json(f)))
                 from app_feature f
                 where f.version = v.id), '[]') as features
from app_version v;
And my 5 cents: note that id is the primary key of app_version, so it's possible to group by app_version.id only.
select v.*, coalesce(json_agg(to_json(f)), '[]') as features
from app_version v join app_feature f on f.version = v.id
group by v.id;
You could move the JSON aggregation into a view, then join to the view:
create view app_features_json
as
select af.version,
json_agg(row_to_json(af)) as features
from app_feature af
group by af.version;
Then use that view in a join:
SELECT v.*,
       afj.features
FROM app_version v
join app_features_json afj on afj.version = v.id;

Netsuite get "Currency Revaluation (Unrealized Gain/Loss)" Table

I am new to NetSuite. Is it possible to get the table "Currency Revaluation (Unrealized Gain/Loss)" using SuiteScript?
I would like to use it in SuiteQL. Here is a sample:
SELECT
NT.*
FROM
NextTransactionLineLink AS NTLL
INNER JOIN Transaction AS NT ON (NT.ID = NTLL.NextDoc)
INNER JOIN Transaction ON (Transaction.ID = NTLL.PreviousDoc)
inner join transactionline tl ON (tl.TRANSACTION = Transaction.id)
inner join subsidiary sb ON sb.id = tl.subsidiary
inner join accountingperiod ap ON (
(ap.id = Transaction.postingperiod)
AND ap.isposting = 'T'
)
inner join accountingperiod pap ON (
(pap.id = NT.postingperiod)
AND pap.isposting = 'T'
)
inner join currencyrate cr1 ON (
cr1.basecurrency = sb.currency
AND cr1.transactioncurrency = Transaction.currency
AND (
cr1.effectivedate = To_date(
ap.startdate,
'MM/DD/YYYY'
)
)
)
inner join consolidatedexchangerate cexr ON (
cexr.postingperiod = Transaction.postingperiod
AND cexr.fromsubsidiary = tl.subsidiary
AND cexr.tosubsidiary = 1
)
WHERE
(NTLL.NextDoc = 212328)
Thanks in Advance
By "getting", I am assuming you want to load this record and the fetch/update the values within.
Simply append &xml=t to the URL of the record.
Something like this:
https://342xxxx-sb1.app.netsuite.com/app/accounting/transactions/vendbill.nl?id=00000&whence=&xml=t
You will get the record type.
<record recordType="vendorbill"
Use this record type in your Script.
var objRecord = record.load({
type: record.Type.VENDOR_BILL,
id: 157,
isDynamic: true,
});
If the record is a Custom Record, use this:
var newFeatureRecord = record.load({
type: 'customrecord_feature',
id: 1,
isDynamic: true
});
Let me know in case of issues in the comments below.

SQLAlchemy Core. Values method does not limit columns in insert statement

The SQLAlchemy Core insert expressions documentation says:
Notice above that the INSERT statement names every column in the users table. This can be limited by using the values() method, which establishes the VALUES clause of the INSERT explicitly:
With that in mind, I wrote the following snippet, which returns unexpected results.
import uuid
from datetime import datetime

import sqlalchemy
from sqlalchemy import types
from sqlalchemy.dialects import postgresql

metadata = sqlalchemy.MetaData()

users = sqlalchemy.Table(
    "users",
    metadata,
    sqlalchemy.Column(
        "id",
        postgresql.UUID(as_uuid=True),
        default=uuid.uuid4(),
        primary_key=True,
    ),
    sqlalchemy.Column("email", types.String, unique=True, index=True),
    sqlalchemy.Column(
        "created_at",
        types.TIMESTAMP(timezone=True),
        default=datetime.utcnow(),
    ),
    sqlalchemy.Column(
        "updated_at",
        types.TIMESTAMP(timezone=True),
        default=datetime.utcnow(),
        onupdate=datetime.utcnow(),
    ),
)
email = "god#olympus.org"
query = users.insert().values(email=email)
# (Pdb) print(query)
# INSERT INTO users (id, email, created_at, updated_at) VALUES (:id, :email, :created_at, :updated_at)
#
# (Pdb) print(query.compile().params)
# {'id': None, 'email': 'god@olympus.org', 'created_at': None, 'updated_at': None}
I expected the query to be INSERT INTO users (email) VALUES (:email)
Is there anything I'm missing?
I'm using SQLAlchemy==1.3.20 by the way.
The issue does not relate to SQLAlchemy, but rather to encode/databases, which compiles the statement without running SQLAlchemy's execution-time default handling, so the defaulted columns end up bound as None. See the encode/databases issue: Support for "default" parameter in sqlalchemy.Column.
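Until that is supported, one workaround is to pass the values explicitly so no column default has to fire. A sketch based on the table above (note, separately, that default=uuid.uuid4() calls the function once at definition time; the callable default=uuid.uuid4 is usually what's intended):

import uuid
from datetime import datetime

# Supply the defaulted columns explicitly instead of relying on
# Python-side column defaults, which encode/databases does not evaluate.
now = datetime.utcnow()
query = users.insert().values(
    id=uuid.uuid4(),
    email="god@olympus.org",
    created_at=now,
    updated_at=now,
)
# await database.execute(query)  # with an encode/databases Database instance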

Psycopg2 upsert multiple rows with one query

I have a table in PostgreSQL with fields id (unique) and val.
I want to execute something like this:
INSERT INTO my_table (id, val)
VALUES (%(id)s, %(val)s)
ON CONFLICT(id) DO UPDATE
SET val = val + %(val)s
Data to insert is like [{"id": 123, "val": 5}, {"id": 456, "val": 8}, ...]. Is there any way to upsert all of these with one query?
cursor.executemany won't do; it's the same as making queries with all of these dicts in a loop, one after another.
Without ON CONFLICT DO UPDATE, I could just do something like "insert into mytable (id, val) values " + ', '.join(['(%s, %s)'] * len(data)) and transform the data to the list [id1, val1, id2, val2, ...]. But I've no idea how to combine multiple values with an insert-and-update statement.
I had the same problem. After a while of searching, I found two posts:
PostgreSQL upsert: https://dba.stackexchange.com/questions/167591/postgresql-psycopg2-upsert-syntax-to-update-columns
Psycopg2 insert many rows: psycopg2: insert multiple rows with one query
So the answer for you is:
# my table: cas(address, id, description) - address is primary key
data = [('0x18f9f00a432F50c6E2429d31776724d3cB873BEF', '1000', 'mot ngan'),
        ('0x06471C53CE649Eb4dA88b792D500544A7E5C9635', '2000', 'hai ngan')]

args = [cur.mogrify('(%s, %s, %s)', x).decode('utf-8') for x in data]
args_str = ', '.join(args)

cur.execute('''INSERT INTO cas (address, id, description) VALUES '''
            + args_str +
            ''' ON CONFLICT (address) DO UPDATE SET
                (address, id, description) = (EXCLUDED.address, EXCLUDED.id, EXCLUDED.description)''')
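Alternatively, psycopg2.extras.execute_values can build the multi-row VALUES list itself, which avoids the manual mogrify/join step. A sketch using the my_table(id, val) schema and the additive update from the question:

from psycopg2.extras import execute_values

# %s is replaced by the generated multi-row VALUES list
data = [(123, 5), (456, 8)]
execute_values(
    cur,
    """INSERT INTO my_table (id, val) VALUES %s
       ON CONFLICT (id) DO UPDATE SET val = my_table.val + EXCLUDED.val""",
    data,
)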

Ecto - select all fields from all tables in a 5 table join

I have a products table, a categories table, and a shops table, as well as joining tables product_shops and product_categories.
I want to select all products that have a category that is in the conn.query_params, and that are in shops within a certain distance, also specified in the conn.query_params. And I want to return pretty much every field of the product, category, and shop that was selected.
I have this:
get "/products" do
query = conn.query_params
point = %Geo.Point{coordinates: {String.to_float(query["longitude"]), String.to_float(query["latitude"])}, srid: 4326}
shops = within(Shop, point, String.to_float(query["distanceFromPlaceValue"]) * 1000) |> order_by_nearest(point) |> select_with_distance(point) |> Api.Repo.all
categories = Enum.map(query["categories"], fn(x) -> String.to_integer(x) end)
shop_ids = Enum.map(shops, fn(x) -> x.id end)
query1 = from p in Product,
join: ps in ProductShop, on: p.id == ps.p_id,
join: s in Shop, on: s.id == ps.s_id,
join: pc in ProductCategory, on: p.id == pc.p_id,
join: c in Category, on: c.id == pc.c_id,
where: Enum.member?(categories, c.id),
where: Enum.member?(shop_ids, s.id),
select: p, c, s,
group_by s,
order_by s.distance
In the code above, the shops variable holds all the shops that are within the specified distance.
This is inside shop.ex, to get all shops within the distance, ordered from the closest shop:
def within(query, point, radius_in_m) do
  {lng, lat} = point.coordinates
  from(shop in query, where: fragment("ST_DWithin(?::geography, ST_SetSRID(ST_MakePoint(?, ?), ?), ?)", shop.point, ^lng, ^lat, ^point.srid, ^radius_in_m))
end

def order_by_nearest(query, point) do
  {lng, lat} = point.coordinates
  from(shop in query, order_by: fragment("? <-> ST_SetSRID(ST_MakePoint(?,?), ?)", shop.point, ^lng, ^lat, ^point.srid))
end

def select_with_distance(query, point) do
  {lng, lat} = point.coordinates
  from(shop in query, select: %{shop | distance: fragment("ST_Distance_Sphere(?, ST_SetSRID(ST_MakePoint(?,?), ?))", shop.point, ^lng, ^lat, ^point.srid)})
end
This is the shop schema; the distance field gets populated when select_with_distance is called.
@derive {Poison.Encoder, only: [:name, :place_id, :point]}
schema "shops" do
  field :name, :string
  field :place_id, :string
  field :point, Geo.Point
  field :distance, :float, virtual: true
  timestamps()
end
My current error is in the select: p, c, s line as I'm unsure how to select the whole lot:
== Compilation error on file lib/api/router.ex ==
** (SyntaxError) lib/api/router.ex:153: syntax error before: c
(elixir) lib/kernel/parallel_compiler.ex:117: anonymous fn/4 in Kernel.ParallelCompiler.spawn_compilers/1
Also unsure if the group_by should be the shop. I just know I want to return all fields and only unique fields, ordered by proximity of the shop. I feel most of the way there but a bit stuck on the select, group_by, and order_by.
EDIT: Thanks Dogbert for fixing those two issues in the comments.
Currently the code is:
products_shops_categories = from p in Product,
  join: ps in ProductShop, on: p.id == ps.p_id,
  join: s in Shop, on: s.id == ps.s_id,
  join: pc in ProductCategory, on: p.id == pc.p_id,
  join: c in Category, on: c.id == pc.c_id,
  where: c.id in ^categories,
  where: s.id in ^shop_ids,
  select: {p, c, s}
  group_by s,
  order_by s.distance
I have this error now:
** (Ecto.Query.CompileError) `order_by(s.distance())` is not a valid query expression.
* If you intended to call a database function, please check the documentation
for Ecto.Query to see the supported database expressions
* If you intended to call an Elixir function or introduce a value,
you need to explicitly interpolate it with ^
expanding macro: Ecto.Query.group_by/2
lib/api/router.ex:155: Api.Router.do_match/4
(elixir) lib/kernel/parallel_compiler.ex:117: anonymous fn/4 in Kernel.ParallelCompiler.spawn_compilers/1
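Both compile errors here are syntax level: after select, the remaining clauses still need Ecto's keyword syntax (group_by:, order_by:) and comma separators. A syntactically valid sketch of the query (the grouping choice is hypothetical; note that distance is a virtual field, so ordering on it in SQL would need the fragment from order_by_nearest rather than s.distance):

products_shops_categories = from p in Product,
  join: ps in ProductShop, on: p.id == ps.p_id,
  join: s in Shop, on: s.id == ps.s_id,
  join: pc in ProductCategory, on: p.id == pc.p_id,
  join: c in Category, on: c.id == pc.c_id,
  where: c.id in ^categories,
  where: s.id in ^shop_ids,
  # grouping by each table's primary key lets Postgres select whole rows
  group_by: [p.id, c.id, s.id],
  select: {p, c, s}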
