I have the following models set up
# task.rb
class Task < ActiveRecord::Base
  # everything all task objects have in common
end

# login_request.rb
class Tasks::LoginRequest < Task
  store :data, accessors: [:email, :first_name, :last_name, :expires_at]

  composed_of :valid_until, class_name: 'DateTime', mapping: %w(expires_at to_s),
              constructor: Proc.new { |date| (date && date.to_datetime) || DateTime.now },
              converter: Proc.new { |value| value.to_s.to_datetime }
end
I'm using the datetime_select helper in my form:
# _form.html.haml
= f.datetime_select :valid_until
This works quite well, but when I call update in my controller with the submitted form data I get the following error message:
1 error(s) on assignment of multiparameter attributes [error on assignment [2014, 4, 2, 9, 48] to valid_until (can't write unknown attribute 'expires_at')]
So I'm guessing the update method tries to manipulate the attributes hash directly, but obviously it can't find the attribute expires_at, since that is just an accessor method on the JSON column data.
I know I could simply add this field to the DB and it would probably work - although there would then be no need for a composed_of statement. But I'd rather not go this route, because not every task has an expires_at column.
How can I overcome this error? Or did I miss something?
Currently composed_of does not support this scenario, since it writes directly to attributes that are assumed to be in the database. I wrote a tweaked composed_of version that does (based on the Rails 4.0.2 version).
Put this in an initializer:
# config/initializers/support_store_in_composed_of.rb
module ActiveRecord
  module Aggregations
    extend ActiveSupport::Concern

    def clear_aggregation_cache #:nodoc:
      @aggregation_cache.clear if persisted?
    end

    module ClassMethods
      def composed_of_with_store_support(part_id, options = {})
        options.assert_valid_keys(:class_name, :mapping, :allow_nil, :constructor, :converter, :store)

        name        = part_id.id2name
        class_name  = options[:class_name]  || name.camelize
        mapping     = options[:mapping]     || [ name, name ]
        mapping     = [ mapping ] unless mapping.first.is_a?(Array)
        allow_nil   = options[:allow_nil]   || false
        constructor = options[:constructor] || :new
        converter   = options[:converter]

        reader_method(name, class_name, mapping, allow_nil, constructor, options[:store])
        writer_method(name, class_name, mapping, allow_nil, converter, options[:store])

        create_reflection(:composed_of, part_id, nil, options, self)
      end

      private

      def reader_method(name, class_name, mapping, allow_nil, constructor, store = nil)
        define_method(name) do
          if @aggregation_cache[name].nil? && (!allow_nil || mapping.any? { |pair| !read_attribute(pair.first).nil? })
            if store.present?
              attrs = mapping.collect { |pair| send(pair.first) }
            else
              attrs = mapping.collect { |pair| read_attribute(pair.first) }
            end
            object = constructor.respond_to?(:call) ?
              constructor.call(*attrs) :
              class_name.constantize.send(constructor, *attrs)
            @aggregation_cache[name] = object
          end
          @aggregation_cache[name]
        end
      end

      def writer_method(name, class_name, mapping, allow_nil, converter, store = nil)
        define_method("#{name}=") do |part|
          klass = class_name.constantize
          unless part.is_a?(klass) || converter.nil? || part.nil?
            part = converter.respond_to?(:call) ? converter.call(part) : klass.send(converter, part)
          end

          if part.nil? && allow_nil
            mapping.each { |pair| self[pair.first] = nil }
            @aggregation_cache[name] = nil
          else
            if store.present?
              mapping.each { |pair| send("#{pair.first}=", part.send(pair.last)) }
            else
              mapping.each { |pair| self[pair.first] = part.send(pair.last) }
            end
            @aggregation_cache[name] = part.freeze
          end
        end
      end
    end
  end
end
Using it like this should solve your problem:
class Task < ActiveRecord::Base
  store :data, accessors: [:email, :first_name, :last_name, :expires_at]

  composed_of_with_store_support :valid_until, class_name: 'DateTime', mapping: %w(expires_at to_s),
                                 constructor: Proc.new { |date| (date && date.to_datetime) || DateTime.now },
                                 converter: Proc.new { |value| value.to_s.to_datetime },
                                 store: true
end
The issue
I'm trying out Great Expectations with Dagster, as per this guide.
My pipeline seems to execute correctly until it reaches this block:
expectation = dagster_ge.ge_validation_op_factory(
    name='ge_validation_op',
    datasource_name='dev.data-pipeline-data-storage.data_pipelines.raw_data.sirene_update',
    suite_name='suite.data_pipelines.raw_data.sirene_update',
)

if expectation["success"]:
    print("Success")
Trying to call expectation["success"] results in a
# TypeError: 'SolidDefinition' object is not subscriptable
When I go inside the code of ge_validation_op_factory, there is a _ge_validation_fn that should yield an ExpectationResult, but somehow it gets converted into a SolidDefinition...
Dagster version = 0.15.9;
Great Expectations version = 0.15.44
Code to reproduce the error
In my code, I am trying to interact with an S3 bucket, so it would be a bit tedious to re-create the code for my example, but here it is anyway:
In gx_postprocessing.py:
import json

import boto3
import dagster_ge
from dagster import (
    op,
    graph,
    Field,
    String,
    OpExecutionContext,
)

from typing import List, Dict


@op(
    config_schema={
        "bucket": Field(
            String,
            description="s3 bucket name",
        ),
        "path_in_s3": Field(
            String,
            description="Prefix representing the path to data",
        ),
        "technical_date": Field(
            String,
            description="date string to fetch data",
        ),
        "file_name": Field(
            String,
            description="file name that contains the data",
        ),
    }
)
def read_in_json_datafile_from_s3(context: OpExecutionContext):
    bucket = context.op_config["bucket"]
    path_in_s3 = context.op_config["path_in_s3"]
    technical_date = context.op_config["technical_date"]
    file_name = context.op_config["file_name"]

    object = f"{path_in_s3}/" f"technical_date={technical_date}/" f"{file_name}"

    s3 = boto3.resource("s3")
    content_object = s3.Object(bucket, object)
    file_content = content_object.get()["Body"].read().decode("utf-8")
    json_content = json.loads(file_content)
    return json_content


@op
def process_example_dq(data: List[Dict]):
    return len(data)


@op
def postprocess_example_dq(numrows, expectation):
    if expectation["success"]:
        return numrows
    else:
        raise ValueError


@op
def validate_example_dq(context: OpExecutionContext):
    expectation = dagster_ge.ge_validation_op_factory(
        name='ge_validation_op',
        datasource_name='my_bucket.data_pipelines.raw_data.example_update',
        suite_name='suite.data_pipelines.raw_data.example_update',
    )
    return expectation


@graph(
    config={
        "read_in_json_datafile_from_s3": {
            "config": {
                "bucket": "my_bucket",
                "path_in_s3": "my_path",
                "technical_date": "2023-01-24",
                "file_name": "myfile_20230124.json",
            }
        },
    },
)
def example_update_evaluation():
    output_dict = read_in_json_datafile_from_s3()
    nb_items = process_example_dq(data=output_dict)
    expectation = validate_example_dq()
    postprocess_example_dq(
        numrows=nb_items,
        expectation=expectation,
    )
Do not forget to add great_expectations_poc_pipeline to your __init__.py where the pipelines=[..] are listed.
In this example, dagster_ge.ge_validation_op_factory(...) returns an OpDefinition, which is the same type of thing as (for example) process_example_dq, and should be composed in the graph definition the same way, rather than invoked within another op.
So instead, you'd want to have something like:
validate_example_dq = dagster_ge.ge_validation_op_factory(
    name='ge_validation_op',
    datasource_name='my_bucket.data_pipelines.raw_data.example_update',
    suite_name='suite.data_pipelines.raw_data.example_update',
)
Then use that op inside your graph definition the same way you currently are (i.e. expectation = validate_example_dq())
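Putting the two pieces together, here is a minimal sketch of what the adjusted module might look like (reusing the op and graph names from the question; whether the factory-created op declares a dataset input depends on how your datasource and suite are configured, so the no-argument call shown below is an assumption that follows the wording above):

# Created at module level, not inside another op.
validate_example_dq = dagster_ge.ge_validation_op_factory(
    name="ge_validation_op",
    datasource_name="my_bucket.data_pipelines.raw_data.example_update",
    suite_name="suite.data_pipelines.raw_data.example_update",
)


@graph
def example_update_evaluation():
    output_dict = read_in_json_datafile_from_s3()
    nb_items = process_example_dq(data=output_dict)

    # Compose the GE op like any other op in the graph.  If your factory-created
    # op declares a dataset input, pass the data in here instead, e.g.
    # validate_example_dq(output_dict) -- that depends on your GE configuration.
    expectation = validate_example_dq()

    postprocess_example_dq(
        numrows=nb_items,
        expectation=expectation,
    )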
def import_update
  require 'csv'
  file = params[:file]
  CSV.foreach(file.path, headers: true) do |row|
    @prod = Spree::Product.find(row["id"])
    @var = Spree::Variant.find_by(product_id: @prod.id)
    Spree::Product.where(:id => row["id"]).update_all(:name => row["name"] if !row[name].nil?.present?, :meta_description => row["meta_description"], :shipping_category_id => row["shipping_category_id"], :description => row["description"], :meta_keywords => row["meta_keywords"], :tax_category_id => row["tax_category_id"], :available_on => row["available_on"], :deleted_at => row["deleted_at"], :promotionable => row["promotionable"], :meta_title => row["meta_title"], :featured => row["featured"], :supplier_id => row["supplier_id"])
  end
end
I want to check whether each value in the row is present; a column should only be updated when its value is not null. The condition has to stay on a single line because I want to apply it to every attribute in the update statement. I wrote the code above, but it shows an error.
Try this:
params = ["name","meta_description","shipping_category_id","description","meta_keywords","tax_category_id","available_on","deleted_at","promotionable","meta_title","featured","supplier_id"
hash = {}
params.each do |param|
if row[param]
hash[param] = row[param]
end
end
Spree::Product.where(:id => row["id"]).update_attributes(hash)
This will let you keep your code DRY.
EDIT:
What are these lines supposed to do?
@prod = Spree::Product.find(row["id"])
@var = Spree::Variant.find_by(product_id: @prod.id)
I presume you don't have several entries with one ID, and you're not using the objects that you retrieved in those two lines, so simply write the method like this:
def import_update
  require 'csv'
  file = params[:file]
  params = ["name", "meta_description", "shipping_category_id", "description", "meta_keywords", "tax_category_id", "available_on", "deleted_at", "promotionable", "meta_title", "featured", "supplier_id"]

  CSV.foreach(file.path, headers: true) do |row|
    hash = {}
    params.each do |param|
      if row[param]
        hash[param] = row[param]
      end
    end
    Spree::Product.find(row["id"]).update_attributes(hash)
  end
end
I am new to Ruby on Rails 4. I have created a custom validator but cannot push a value onto an array. It shows the error
undefined method `<<' for nil:NilClass
It highlights @msg << 1.
For instance, my model looks like this:
class User < ActiveRecord::Base
  has_secure_password
  validates :email, :email_format => true, :on => :create
  validates :password, password_format: { with: "upercase" }
end
My custom validator
class PasswordFormatValidator < ActiveModel::EachValidator
  # def initilize(options) - Fixed
  def initialize(options)
    @msg = []
    @password1 = options[:attributes=>[:password]]
    @val = options.inspect
    super
  end

  def validate_each(record, attribute, value)
    record.errors[attribute] << @val
    unless (value.nil? || value.empty?)
      @msg << 1
      # record.errors[attribute] << "testing"
    end
  end
end
The @val output:
{:attributes=>[:password], :complexity=>3, :length=>6, :class=>User(id: integer, email: string, password_digest: string, created_at: datetime, updated_at: datetime)}
You have a typo in your constructor name: it should be initialize, not initilize. This is why your @msg variable is nil - your constructor has never been called, so @msg is never initialized!
Have a nice day.
I'm stuck on this:
I need to populate data into my app.
I'm using ProMotion for the very first time.
Without ProMotion I used to fetch the data in the init method.
Now my code looks like this:
class Parties < ProMotion::TableScreen
  attr_accessor :_cells

  @news = []

  include MyUiModules

  title 'Yazarlar'

  refreshable callback: :on_refresh,
    pull_message: "Pull to refresh",
    refreshing: "Refreshing data…",
    updated_format: "Last updated at %s",
    updated_time_format: "%l:%M %p"

  def on_refresh
    # MyItems.pull_from_server do |items|
    #   @my_items = items
    end_refreshing
    # update_table_data
    # end
  end

  def table_data
    _cells = []
    [{
      title: nil,
      cells: create_cells(_cells)
    }]
  end

  def will_appear
    Barbutton.create_bar(self)
    set_attributes self.view, {
      backgroundColor: hex_color("DBDBDB")
    }
  end

  def go_to_next
    App.delegate.slide_menu.show_menu
  end

  def create_cells(_cells)
    BW::HTTP.get(URL) do |response|
      json = BW::JSON.parse response.body.to_str
      for line in json
        _cells << { title: line["val_for_title"] }
      end
    end
    _cells
  end
end
Unfortunately this returns an empty array, and I can't figure out how to solve it.
Thanks for your help!
You can't do that, because BW::HTTP.get is asynchronous!
Instead try something like this:
def on_init
  @data = []
end

def table_data
  [
    {
      title: nil,
      cells: @data
    }
  ]
end

def on_refresh
  BW::HTTP.get(URL) do |response|
    @data = []
    json = BW::JSON.parse(response.body.to_str)
    json.each do |hash|
      @data << { title: hash["val_for_title"] }
    end
    update_table_data
    end_refreshing
  end
end
Hope it helps :-)
I have a model in Django admin as follows:
ChoiceA = (
    ("on-false", "on-false"),
    ("on-true", "on-true"),
)

ChoiceB = (
    ("always", "always"),
    ("never", "never"),
)

id = models.CharField(verbose_name="Field", max_length=32)
type = models.CharField(verbose_name="Expression", max_length=32)
action = models.CharField(max_length=32, choices=x)
Now, based on the type entered by the user: if the user enters type = "a" then action's choices should be set to ChoiceA, and if the user enters type = "b" then action's choices should be set to ChoiceB. How can I achieve this in Django admin?
Edit:
action_change.js
jQuery(document).ready(function() {
    $("#id_type").change(function(event) {
        $.ajax({
            "type" : "POST",
            "url" : "/action_choices/",
            "dataType" : "json",
            "cache" : false,
            "error" : alert("hello"),
            "success" : function(json) {
                $('#id_action >option').remove();
                for (var j = 0; j < json.length; j++) {
                    $('#id_action').append($('<option></option>').val(json[j][0]).html(json[j][1]));
                }
            }
        });
    });
});
You can achieve it using Ajax and jQuery:
models.py:
type = models.CharField(verbose_name="Expression",max_length=32)
action = models.CharField(max_length=32, choices = (('', ''), ))
admin.py:
class MyModelAdmin(admin.ModelAdmin):
    list_display = ('type', )

    class Media:
        js = ['/static/js/action_change.js']

admin.site.register(MyModel, MyModelAdmin)
urls.py:
url(r'^action_choices/', 'myproject.myapp.views.action_choices'),
views.py:
from django.http import HttpResponse
from django.utils import simplejson  # bundled with older Django; use the json module on newer versions


def action_choices(request):
    action_list = []
    ChoiceA = ("on-false", "on-true")
    ChoiceB = ("always", "never")

    action_type = request.GET.get('action_type')
    if str(action_type).lower() == 'a':
        choices = ChoiceA
    elif str(action_type).lower() == 'b':
        choices = ChoiceB
    else:
        choices = ()

    [action_list.append((each, each)) for each in choices]
    json = simplejson.dumps(action_list)
    return HttpResponse(json, mimetype='application/javascript')
Create the file action_change.js with the following content in your static folder and define the correct path in the Media class of the ModelAdmin.
action_change.js
(function($) {
    $(function() {
        $(document).ready(function() {
            $('#id_type').bind('keyup', type_change);
            $('#id_action >option').show();
        });
    });
})(django.jQuery);

// based on the type, action will be loaded
var $ = django.jQuery.noConflict();

function type_change()
{
    var action_type = $('#id_type').val();
    $.ajax({
        "type" : "GET",
        "url" : "/action_choices/?action_type=" + action_type,
        "dataType" : "json",
        "cache" : false,
        "success" : function(json) {
            $('#id_action >option').remove();
            for (var j = 0; j < json.length; j++) {
                $('#id_action').append($('<option></option>').val(json[j][0]).html(json[j][1]));
            }
        }
    });
}
This should work fine for the scenario you asked about. I'm giving my suggestion below:
models.py
type = models.CharField(verbose_name="Expression",max_length=32, choices = (('a', 'a'), ('b', 'b'), ))
action = models.CharField(max_length=32, choices = (('', ''), ))
action_change.js (line 5)
$('#id_type').bind('change', type_change);
You would have to initialize the action field with all possible choices, or Django will complain that a choice that didn't previously exist isn't a valid choice.
My recommendation would be to initialize the field with all of the possible choices, and use JavaScript to toggle the visibility of the choices, depending on the value of type. There are a few plugins around that will handle dynamic fields in Django admin, but most that I've seen deal with ForeignKey or ManyToMany fields that need to do lookups.
You're probably best off just adding some JavaScript to your admin form via the Media meta class and handling it yourself.
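For illustration, here is a minimal sketch of that approach (the Rule model name, the combined choice tuple, and the JS file path are hypothetical; the individual choice values come from the question, and the JavaScript that actually toggles option visibility is left out):

from django.contrib import admin
from django.db import models


# Declare every possible choice on the field, so Django's validation accepts
# whichever subset the admin JavaScript leaves visible.
ALL_ACTION_CHOICES = (
    ("on-false", "on-false"),
    ("on-true", "on-true"),
    ("always", "always"),
    ("never", "never"),
)


class Rule(models.Model):
    type = models.CharField(verbose_name="Expression", max_length=32,
                            choices=(("a", "a"), ("b", "b")))
    action = models.CharField(max_length=32, choices=ALL_ACTION_CHOICES)


class RuleAdmin(admin.ModelAdmin):
    class Media:
        # Custom JS that hides/shows the action <option> elements based on #id_type.
        js = ["/static/js/action_visibility.js"]


admin.site.register(Rule, RuleAdmin)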