How does one update a model with a nested association (using Elixir, Phoenix, and Ecto)?
I've tried the following, treating the address as part of its parent's update, without success (using the Plataformatec blog as inspiration).
Models:
schema "user" do
has_one :address, {"users_addresses", MyApp.Address}, foreign_key: :assoc_id
end
#required_fields ~w(address)
------
# Materialized in users_addresses table
schema "abstract table: addresses" do
  field :assoc_id, :integer
  field :street_address, :string
end
Request (PATCH):
{
  "user" => {
    "address" => {
      "street_address" => "1234"
    }
  }
}
Controller:
def update(conn, %{"id" => id, "user" => params}) do
user = MyApp.Repo.get(User, id)
|> MyApp.Repo.preload [:address]
if is_nil(user) do
send_resp(conn, 404, "")
else
changeset = User.changeset(user, params)
if changeset.valid? do
case MyApp.Repo.update(changeset) do
{:ok, model} -> send_resp(conn, 204, "")
{:error, changeset} -> conn
|> put_status(:unprocessable_entity)
|> render(MyApp.ChangesetView, "error.json", changeset: changeset)
end
else
conn
|> put_status(:unprocessable_entity)
|> render(MyApp.ChangesetView, "error.json", changeset: changeset)
end
end
end
Changeset (from logs):
%Ecto.Changeset{action: nil,
changes: %{address: %Ecto.Changeset{action: :insert, changes: %{street_address: "1234"},
constraints: [],
....
model: %MyApp.User{__meta__: #Ecto.Schema.Metadata<:loaded>,
address: %MyApp.Address{__meta__: #Ecto.Schema.Metadata<:loaded>,
assoc_id: 1229, id: 308,
street_address: "41423 Jakubowski Village"
....
}
}
}
Error (note: fixed as of Ecto v1.0.3 or later):
** (exit) an exception was raised:
** (Postgrex.Error) ERROR (undefined_table): relation "abstract table: addresses" does not exist
(ecto) lib/ecto/adapters/sql.ex:479: Ecto.Adapters.SQL.model/6
(ecto) lib/ecto/repo/model.ex:219: Ecto.Repo.Model.apply/4
(ecto) lib/ecto/repo/model.ex:71: anonymous fn/10 in Ecto.Repo.Model.insert/4
(ecto) lib/ecto/repo/model.ex:340: anonymous fn/3 in Ecto.Repo.Model.wrap_in_transaction/8
(ecto) lib/ecto/adapters/sql.ex:531: anonymous fn/10 in Ecto.Adapters.SQL.transaction/3
(ecto) lib/ecto/pool.ex:262: Ecto.Pool.inner_transaction/3
(ecto) lib/ecto/adapters/sql.ex:534: Ecto.Adapters.SQL.transaction/3
(ecto) lib/ecto/association.ex:368: Ecto.Association.Has.on_repo_action/7
The (Postgrex.Error) ERROR (undefined_table): relation "abstract table: addresses" does not exist failure was due to a bug in Ecto, which is fixed in Ecto v1.0.3 and later.
Beyond that, the code above is missing an id for the address; without it, Ecto will insert a new address instead of updating the existing one.
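(This assumes a changeset along the lines of the generated default, with address listed in the cast fields, as the @required_fields line in the question suggests; a sketch, other fields trimmed:)

@required_fields ~w(address)
@optional_fields ~w()

# Casting :address together with the parent is what builds the nested
# %Ecto.Changeset{} for the has_one seen in the logged changeset above.
def changeset(model, params \\ :empty) do
  model
  |> cast(params, @required_fields, @optional_fields)
end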
Request (PATCH):
{
  "user" => {
    "address" => {
      "id" => 4,
      "street_address" => "1234"
    }
  }
}
The new controller code, including some of José Valim's suggested improvements:
def update(conn, %{"id" => id, "user" => params}) do
user = MyApp.Repo.get!(User, id)
|> MyApp.Repo.preload [:address
changeset = User.changeset(user, params)
case MyApp.Repo.update(changeset) do
{:ok, model} -> send_resp(conn, 204, "")
{:error, changeset} -> conn
|> put_status(:unprocessable_entity)
|> render(MyApp.ChangesetView, "error.json", changeset: changeset)
end
end
Or, in this situation, since the address is both required and a has_one, the id can be added server-side:
def update(conn, %{"id" => id, "user" => params}) do
  user =
    MyApp.Repo.get!(User, id)
    |> MyApp.Repo.preload([:address])

  address = params["address"]
  address = Map.put(address, "id", user.address.id)
  params = Map.put(params, "address", address)

  changeset = User.changeset(user, params)

  case MyApp.Repo.update(changeset) do
    {:ok, model} -> send_resp(conn, 204, "")
    {:error, changeset} ->
      conn
      |> put_status(:unprocessable_entity)
      |> render(MyApp.ChangesetView, "error.json", changeset: changeset)
  end
end
Related
My query checks at insert time whether a record is a duplicate:
def insertValues(tableName: String, model: User): Insert = {
  QueryBuilder.insertInto(tableName).value("bucket", model.profile.internalProfileDetails.get.bucketId)
    ....
    .ifNotExists();
}
I am saving a duplicate entry and expect Cassandra to return an error; instead, I get the existing record back. Shouldn't the insert return an error?
def save(user: User): Future[Option[User]] = Future {
  saveDataToDatabase(user)
}

def saveDataToDatabase(data: M): Option[M] = {
  println("inserting in table " + tablename + " with partition key " + partitionKeyColumns + " and values " + data)
  val insertQuery = insertValues(tablename, data)
  println("insert query is " + insertQuery)
  try {
    val resultSet = session.execute(insertQuery) // execute can take a Statement. Insert is derived from Statement so I can use Insert.
    println("resultset after insert: " + resultSet)
    Some(data)
  } catch {
    case e: Exception => {
      println("cassandra exception " + e)
      None
    }
  }
}
The table schema is:
users (
  bucket int,
  email text,
  authprovider text,
  firstname text,
  lastname text,
  confirmed boolean,
  hasher text,
  id uuid,
  password text,
  salt text,
  PRIMARY KEY ((bucket, email), authprovider, firstname, lastname)
)
In my test case, I expect the return value to be None, but I am getting Some(user):
"UsersRepository" should {
"not save a new user if the user already exist in the system" in {
val insertUserStatement =
s"""
| INSERT INTO users (bucket,email,firstname,lastname,authprovider,password,confirmed,id,hasher,salt) VALUES
| (1,'${testEnv.email}','fn','ln','${testEnv.loginInfo.providerID}','somePassword',false,${testEnv.mockHelperMethods.getUniqueID()},'someHasher','someSalt')
""".stripMargin
testCassandra.executeScripts(new CqlStatements(insertUserStatement))
val userKeys = UserKeys(1, testEnv.email ,testEnv.loginInfo, "fn", "ln")
val cassandraConnectionService = CassandraConnectionManagementService()
val (cassandraSession,cluster) = cassandraConnectionService.connectWithCassandra()
cassandraConnectionService.initKeySpace(cassandraSession,"mykeyspace")
val userRepository = new UsersRepository(testEnv.mockHelperMethods,cassandraSession,"users")
val resultCheckUser = await[Option[User]](userRepository.findOne(userKeys))(Timeout(Duration(5000,"millis")))
val user = User(UUID.fromString("11111111-1111-1111-1111-111111111111"),
UserProfile(
Some(InternalUserProfile(LoginInfo("credentials","test#test.com"),1,false,Some(PasswordInfo("someHasher","somePassword",None)))),
ExternalUserProfile("test#test.com","fn","ln",None)))
println(s"found initial user result ${resultCheckUser}")
resultCheckUser mustBe Some(user)
println(s"user already exists. Will try to add duplicate ")
println(s"adding user with user ${user}")
val resultAddUser = await[Option[User]](userRepository.save(user))(Timeout(Duration(5000,"millis")))
resultAddUser mustBe None
}
}
Output of test execution
insert query is INSERT INTO users (bucket,email,authprovider,firstname,lastname,confirmed,id,password,hasher,salt) VALUES (1,'test@test.com','credentials','fn','ln',false,11111111-1111-1111-1111-111111111111,'somePassword','someHasher','') IF NOT EXISTS;
[info] c.g.n.e.c.Cassandra - INFO [Native-Transport-Requests-1] 2019-06-07 06:13:57,659 OutboundTcpConnection.java:108 - OutboundTcpConnection using coalescing strategy DISABLED
[info] c.g.n.e.c.Cassandra - INFO [HANDSHAKE-localhost/127.0.0.1] 2019-06-07 06:13:57,683 OutboundTcpConnection.java:560 - Handshaking version with localhost/127.0.0.1
resultset after insert: ResultSet[ exhausted: false, Columns[[applied](boolean), bucket(int), email(varchar), authprovider(varchar), firstname(varchar), lastname(varchar), confirmed(boolean), hasher(varchar), id(uuid), password(varchar), salt(varchar)]]
running afterEach statements
afterEach: cassandra state is STARTED
[debug] c.g.n.e.c.t.TestCassandra - Stop TestCassandra 3.11.1
Some(User(11111111-1111-1111-1111-111111111111,UserProfile(Some(InternalUserProfile(LoginInfo(credentials,test@test.com),1,false,Some(PasswordInfo(someHasher,somePassword,None)))),ExternalUserProfile(test@test.com,fn,ln,None)))) was not equal to None
ScalaTestFailureLocation: UnitSpecs.RepositorySpecs.UsersRepositorySpecs at (UsersRepositorySpecs.scala:362)
Expected :None
Actual :Some(User(11111111-1111-1111-1111-111111111111,UserProfile(Some(InternalUserProfile(LoginInfo(credentials,test@test.com),1,false,Some(PasswordInfo(someHasher,somePassword,None)))),ExternalUserProfile(test@test.com,fn,ln,None))))
Cassandra does not return an error here because the insert uses IF NOT EXISTS, which is a lightweight transaction. session.execute returns a ResultSet, and ResultSet has a wasApplied method: it returns true if the insert was actually applied and false otherwise (you can see the corresponding [applied] boolean column in the ResultSet printed in your output). When the record is a duplicate, wasApplied is false, so check it instead of expecting an exception:
try {
  val resultSet = session.execute(insertQuery) // execute can take a Statement. Insert is derived from Statement so I can use Insert.
  println("resultset after insert: " + resultSet)
  if (resultSet.wasApplied()) {
    Some(data)
  } else {
    None
  }
} catch {
  case e: Exception => {
    println("cassandra exception " + e)
    None
  }
}
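As a side note, when an IF NOT EXISTS insert is not applied, the returned row also carries the columns of the row that already exists, which is why the ResultSet in the output above lists all the user columns next to [applied]. A small sketch of inspecting it, assuming the same session and insertQuery as above:

val resultSet = session.execute(insertQuery)
if (!resultSet.wasApplied()) {
  // For a rejected IF NOT EXISTS insert, Cassandra returns [applied] = false
  // together with the current values of the existing row.
  val existing = resultSet.one()
  println("duplicate insert; existing email is " + existing.getString("email"))
}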
From the code below, when I call conn.params["geo"], I get the following error:
test/plugs/geoip_test.exs:4
** (UndefinedFunctionError) function Plug.Conn.Unfetched.fetch/2 is undefined (Plug.Conn.Unfetched does not implement the Access behaviour)
stacktrace:
(plug) Plug.Conn.Unfetched.fetch(%{:__struct__ => Plug.Conn.Unfetched, :aspect => :params, "geo" => "Mountain View, US", "ip" => "8.8.8.8"}, "geo")
...
defmodule AgilePulse.Plugs.GeoIPTest do
  use AgilePulse.ConnCase

  test "returns Mountain View for 8.8.8.8" do
    conn = build_conn()
    params = Map.put(conn.params, "ip", "8.8.8.8")
    conn = Map.put(conn, :params, params) |> AgilePulse.Plugs.GeoIP.call(%{})
    assert conn.params["geo"] == "Mountain View, US"
  end
end
defmodule AgilePulse.Plugs.GeoIP do
  import Plug.Conn

  def init(opts), do: opts

  def call(%Plug.Conn{params: %{"ip" => ip}} = conn, _opts) do
    geo = set_geo(ip)
    params = Map.put(conn.params, "geo", geo)
    Map.put(conn, :params, params)
  end

  def call(conn, _opts), do: conn

  ...
end
Could someone enlighten me on why this is failing and what the appropriate solution is? TY!
Short answer: change this:
params = Map.put(conn.params, "ip", "8.8.8.8")
to:
params = %{"ip" => "8.8.8.8"}
Explanation: Phoenix.ConnTest.build_conn/0 returns a Conn whose params field is set to %Plug.Conn.Unfetched{}. Using Map.put on that does not reset the :__struct__ key; it only adds a new key:
%Plug.Conn{ ...,
  params: %{:__struct__ => Plug.Conn.Unfetched, :aspect => :params,
            "ip" => "8.8.8.8"}, ... }
When you later call params["geo"], Elixir sees that params is still a struct and tries to call fetch/2 on the struct's module, which Plug.Conn.Unfetched doesn't define. To reset params to a plain map (so the square-bracket access goes through the Map implementation of Access), just build it directly: params = %{"ip" => "8.8.8.8"}. Note the string key ("ip" => ...) rather than the atom-key shorthand ("ip": ...), since the plug pattern-matches on %{"ip" => ip}.
I'm working on a custom Chef cookbook and have defined a custom attribute, default["server"]["apikey"] = nil, in a separate attributes file within the cookbook that looks like this:
#Default Attributes
default["webhooks"]["get_response"] = ""
default["webhooks"]["put_response"] = ""
default["webhooks"]["post_response"] = ""
default["server"]["username"] = "user"
default["server"]["password"] = "123"
default["server"]["apikey"] = nil
Within my recipe I then do this:
webhooks_request "Request an API key from TPP " do
uri "172.16.28.200/sdk/authorize/"
post_data (
{ 'Username' => node["server"]["username"], 'Password' => node["server"]["password"]}
)
header_data (
{ 'content-type' => 'application/json'}
)
expected_response_codes [ 200, 201, 400 ]
action :post
end
I then follow this with a ruby_block that updates the value of the default["server"]["apikey"] attribute with the API key, like this:
ruby_block "Extract the API Key" do
block do
jsonData = JSON.parse(node["webhooks"]["post_response"])
jsonData.each do | k, v |
if k == 'APIKey'
node.overide["server"]["apikey"] = v
end
end
end
action :run
end
I can then validate it using this:
ruby_block "Print API Key" do
block do
print "\nKey = : " + node["server"]["apikey"] + "\n"
end
action :run
end
However, if I then try to use the node["server"]["apikey"] attribute in a subsequent resource like this:
webhooks_request "Get data from TPP" do
uri "127.0.0.1/vedsdk/certificates/retrieve?apikey=#{node["server"]["apikey"]}"
post_data (
{ 'data' => "NsCVcQg4fd"}
)
header_data (
{ 'content-type' => 'application/json', 'auth' => node["server"] ["username"]}
)
expected_response_codes [ 200, 201, 400, 401 ]
action :post
end
The value of node["server"]["apikey"]} is always empty. Interestingly though the value of the node["server"] ["username"] attribute is available and works as expected.
Obviously, I'm missing something here buy can't work out what :(
Writing this as a generic answer (it also avoids leaving the question unanswered in the list ;)).
When you are inside a resource, you can defer evaluation of an attribute value to converge time with lazy attribute evaluation.
The correct usage is:
resource "name" do
  attribute lazy { "any value #{with interpolation} inside" }
end
The common error is to put lazy inside the string interpolation, on the assumption that only the variable needs to be evaluated lazily.
By design, lazy wraps the whole attribute value; the block can contain Ruby code that computes the value from something done by a previous resource, too.
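Applied to the recipe in the question, that would look something like the sketch below (webhooks_request and the node attributes are the ones defined earlier):

webhooks_request "Get data from TPP" do
  # The whole string is wrapped in lazy, so the interpolation runs at converge
  # time, after the ruby_block above has set node.override["server"]["apikey"].
  uri lazy { "127.0.0.1/vedsdk/certificates/retrieve?apikey=#{node["server"]["apikey"]}" }
  post_data(
    { 'data' => "NsCVcQg4fd" }
  )
  header_data(
    { 'content-type' => 'application/json', 'auth' => node["server"]["username"] }
  )
  expected_response_codes [ 200, 201, 400, 401 ]
  action :post
end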
Why is a fieldless ContentPart not included in a ContentItem?
Here is the code from Migrations.cs:
SchemaBuilder.CreateTable("ImageDescribedPartRecord",
    table => table.ContentPartRecord());

ContentDefinitionManager.AlterPartDefinition(
    "ImageDescribedPart",
    cpd => cpd.WithField(
        "Image",
        b => b
            .OfType("MediaPickerField")
            .WithSetting("MediaPickerFieldSettings.Required", "false")));

ContentDefinitionManager.AlterTypeDefinition(
    "PlantPicture",
    cfg => cfg
        .WithPart("ImageDescribedPart")
        .WithPart("CommonPart", p => p.WithSetting("OwnerEditorSettings.ShowOwnerEditor", "false"))
        .WithPart("BodyPart")
        .WithPart("TitlePart")
        .WithPart("AutoroutePart",
            builder => builder
                .WithSetting("AutorouteSettings.AllowCustomPattern", "false")
                .WithSetting("AutorouteSettings.AutomaticAdjustmentOnEdit", "true")
                .WithSetting("AutorouteSettings.PatternDefinitions", "[{Name:'Container', Pattern: '{Content.Container.Path}/images/{Content.Slug}', Description: 'apgii/taxon/sub-taxon/images/title'}]")
                .WithSetting("AutorouteSettings.DefaultPatternIndex", "0"))
        .WithPart("ContainablePart")
        .Creatable(true)
        // todo: The following doesn't work. Make it work.
        .WithSetting("BodyPartSettings.FlavorDefault", "text")
);
Here is the code for the ContentPart:
public class ImageDescribedPart : ContentPart<ImageDescribedPartRecord> {
}

public class ImageDescribedPartRecord : ContentPartRecord {}
The following code from a driver
IContentQuery<ContentItem> query = _contentManager
    .Query(VersionOptions.Published)
    .Join<CommonPartRecord>()
    .Where(cr => cr.Container.Id == container.Id);
var items = query.Slice(0, 10).ToList();
IEnumerable<Zulatm.WebPlants.Models.ImageDescribedPart> firstImages = items.AsPart<ImageDescribedPart>();

Logger.Debug("Items count: {0}", items.Count());
for (int i = 0; i < items.Count(); i++) {
    Logger.Debug("Item {0}: {1}", i, items[i].As<TitlePart>().Title);
}
Logger.Debug("Images count: {0}", firstImages.Count());
Returns the following
2012-12-07 16:28:45,616 [35] TaxonomyNodePartDriver - Items count: 2
2012-12-07 16:28:45,617 [35] TaxonomyNodePartDriver - Item 0: Test
2012-12-07 16:28:45,619 [35] TaxonomyNodePartDriver - Item 1: test img 2
2012-12-07 16:28:45,619 [35] TaxonomyNodePartDriver - Images count: 0
You "cast" the item to a part and store it as an IEnumerable. I wonder how this doesn't cause a compile error, because this is wrong.
Most possibly the root of your problems is that if you plan to use a part directly by "casting", the part should have a corresponding driver (can be empty).
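For example, an empty driver along these lines should be enough (a sketch; the namespace and class name are assumptions based on the question's Zulatm.WebPlants project):

using Orchard.ContentManagement.Drivers;
using Zulatm.WebPlants.Models;

namespace Zulatm.WebPlants.Drivers {
    // No Display/Editor overrides are needed; the driver's existence is what
    // lets the part be attached to and retrieved from content items.
    public class ImageDescribedPartDriver : ContentPartDriver<ImageDescribedPart> {
    }
}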
OK. The problem was that there was no filter added in the handler. I have added an ActivatingFilter:
public ImageDescribedHandler(IRepository<ImageDescribedPartRecord> repository, INavigationManager navigationManager)
{
    Filters.Add(new ActivatingFilter<ImageDescribedPart>("PlantPicture"));
}
I also deleted the migration line that creates the table.
Why aren't the :q params being passed to the controller?!
Here's the controller code:
def advanced_search
  @search = Isbn.where(:client_id => current_user.client_id).search(params[:q])
  @search.build_grouping unless @search.groupings.any?
  @isbns = params[:distinct].to_i.zero? ? @search.result : @search.result(distinct: true)
  respond_with @isbns
end
Here's the application_helper with the build_grouping bit in it:
def setup_search_form(builder)
  fields = builder.grouping_fields builder.object.new_grouping, object_name: 'new_object_name', child_index: "new_grouping" do |f|
    render('grouping_fields', f: f)
  end
Here's the form:
<%= search_form_for @search, url: advanced_search_isbns_path, html: {method: :post} do |f| %>
  <% setup_search_form f %>
  #...
<%= render 'results' %>
Here's the trace:
Processing by IsbnsController#advanced_search as HTML
Parameters: {"utf8"=>"✓", "authenticity_token"=>"P074clk3UDe3cNNEG6IZ6TXm2q+fr8dMJfJwSBSx/1c=", "q"=>{"g"=>{"0"=>{"m"=>"or"}}}, "commit"=>"Search"}
Here's the source of search_form_for:
# File 'lib/ransack/helpers/form_helper.rb', line 4
def search_form_for(record, options = {}, &proc)
  if record.is_a?(Ransack::Search)
    search = record
    options[:url] ||= polymorphic_path(search.klass)
  elsif record.is_a?(Array) && (search = record.detect { |o| o.is_a?(Ransack::Search) })
    options[:url] ||= polymorphic_path(record.map { |o| o.is_a?(Ransack::Search) ? o.klass : o })
  else
    raise ArgumentError, "No Ransack::Search object was provided to search_form_for!"
  end
  options[:html] ||= {}
  html_options = {
    :class => options[:as] ? "#{options[:as]}_search" : "#{search.klass.to_s.underscore}_search",
    :id => options[:as] ? "#{options[:as]}_search" : "#{search.klass.to_s.underscore}_search",
    :method => :get
  }
  options[:as] ||= 'q'
  options[:html].reverse_merge!(html_options)
  options[:builder] ||= FormBuilder
  form_for(record, options, &proc)
end
Here's the source of Ransack::Search#initialize:
# File 'lib/ransack/search.rb', line 16
def initialize(object, params = {}, options = {})
  params ||= {}
  @context = Context.for(object)
  @context.auth_object = options[:auth_object]
  @base = Nodes::Grouping.new(@context, 'and')
  build(params.with_indifferent_access)
end
In fact, here's the whole chuffing lot.
Plus: all the routes for the model, included for completeness:
resources :isbns, only: :index do
  match 'advanced_search' => 'isbns#advanced_search',
        on: :collection, via: [:get, :post], as: :advanced_search
end

resources :isbns do
  get :profit, :on => :member
  get :workings, :on => :member
  put :copyisbn, :on => :member
  get :onixxml, :on => :member
  collection do
    post 'edit_multiple'
    put 'update_multiple'
    get 'onix'
    get 'add_subschemes'
    get 'xmp'
    get 'profitindex'
    delete 'destroy_multiple'
  end
end
The models file is very busy, so do ask for more, but here are some of the associations:
Isbn.rb:
attr_accessible :marketingtexts_attributes, :prices_attributes #etc
has_many :marketingtexts, :dependent => :destroy
has_many :supplies, :dependent => :destroy
Marketingtext.rb
attr_accessible :user_id, :created_at, :legacycode, :updated_at, :client_id, :isbn_id, #etc
belongs_to :isbn
before_save :map_legacy_codes
validates :client_id, :presence => true
Supply.rb
attr_accessible :user_id, :client_id, :isbn_id, :prices_attributes, #etc
belongs_to :isbn
has_many :supplydetails, :dependent => :destroy
has_many :prices, :through => :supplydetails
accepts_nested_attributes_for :supplydetails
I can run the cloned Ransack demo code on my machine. In my own app I've excluded all the JavaScript except jQuery and the search script. I'm increasingly thinking it's a conflict with another gem or some other bit of code.