I'm relatively new to Ember and EmberFire. I'm working on a Client/Logo management application. I currently have Firebase Authentication and the Firebase database working as expected. When I try to upload the logo to Firebase Storage, I am presented with this error:
Uncaught Error: No Firebase App '[DEFAULT]' has been created - call Firebase App.initializeApp(). firebase.js:30
Here is the action that is being called:
Controller
firebase: Ember.inject.service( 'firebase' ),

actions: {
  createClient(){
    let name = this.get( 'clientName' );
    let image = document.getElementById( 'client-image' );
    let storeName = name.replace( / /g, '' );
    let storageRef = firebase.storage().ref();
    let file = image.files[0];
    let metadata = {
      'contentType' : file.type
    };
    let uploadTask = storageRef.child( `uploads/${storeName}/${file.name}` ).put( file, metadata );
    uploadTask.on( 'state_changed', null, function( error ){
      console.error( 'Upload Failed:', error );
    }, function(){
      console.log( 'Uploaded', uploadTask.snapshot.totalBytes, 'bytes.' );
      console.log( uploadTask.snapshot.metadata );
      let uploadUrl = uploadTask.snapshot.metadata.downloadURLs[0];
      console.log( 'File available at ', uploadUrl );
      let client = this.store.createRecord( 'client', {
        name: name,
        image: uploadUrl,
        isActive: false,
        timestamp: new Date().getTime()
      } );
      client.save();
    } );
    // Tell the route to hide the client form.
    this.send( 'hideAddClientForm' );
  }
}
Template
<div id="new-overlay" class="overlay" {{action "hideAddClientForm"}}></div>
<div id="newClient">
  {{input type="text" id="client-name" placeholder="Client Name" value=clientName}}<br />
  {{input type="file" id="client-image" value=clientImage}}
  <div class="submitBtn" {{action "createClient"}}>Add Client</div>
</div>
So, in short, how do I access the Firebase reference provided by EmberFire so that I can invoke the storage() method on it, as shown in the Quickstart? If access to that reference isn't possible, do I have to create another, non-EmberFire reference to Firebase in order to use Storage? I'm simply trying to upload a .jpg.
Make sure you are using EmberFire 2.0. You can access the Firebase Storage API through the firebaseApp service:
firebaseApp: Ember.inject.service(),

actions: {
  doSomething() {
    const storageRef = this.get('firebaseApp').storage();
  }
}
The firebaseApp service is an already initialized app, not to be confused with the firebase service, which is the database reference only (kept for backwards compatibility).
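Applied to the createClient action from the question, a minimal sketch might look like this (assuming EmberFire 2.x and the Firebase 3.x SDK it wraps; arrow functions are used so that this still refers to the controller inside the callbacks):

firebaseApp: Ember.inject.service(),

actions: {
  createClient() {
    let name = this.get('clientName');
    let file = document.getElementById('client-image').files[0];
    let storeName = name.replace(/ /g, '');

    // storage() is available on the already initialized app exposed by the service
    let storageRef = this.get('firebaseApp').storage().ref();
    let uploadTask = storageRef
      .child(`uploads/${storeName}/${file.name}`)
      .put(file, { contentType: file.type });

    uploadTask.on('state_changed', null,
      (error) => console.error('Upload Failed:', error),
      () => {
        // Arrow function keeps `this` bound to the controller
        let client = this.store.createRecord('client', {
          name: name,
          image: uploadTask.snapshot.metadata.downloadURLs[0],
          isActive: false,
          timestamp: new Date().getTime()
        });
        client.save();
      }
    );

    this.send('hideAddClientForm');
  }
}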
Related
I have a simple "Sign in with Google" app where only internal users of a Google Workspace can sign in, e.g.:
<html>
<body>
  <script src="https://accounts.google.com/gsi/client" async defer></script>
  <div id="g_id_onload"
       data-client_id="CLIENT_ID"
       data-context="signin"
       data-ux_mode="popup"
       data-callback="handleCredentialResponse">
  </div>
  <div class="g_id_signin"
       data-type="standard"
       data-shape="rectangular"
       data-theme="outline"
       data-text="signin_with"
       data-size="large"
       data-logo_alignment="left">
  </div>
  <script>
    function handleCredentialResponse(response) {
      console.log(response.credential)
      const decodedJwt = decodeJwt(response.credential);
      console.log(decodedJwt)
      console.log("ID: " + decodedJwt.sub);
      console.log('Full Name: ' + decodedJwt.name);
      console.log('Given Name: ' + decodedJwt.given_name);
      console.log('Family Name: ' + decodedJwt.family_name);
      console.log("Image URL: " + decodedJwt.picture);
      console.log("Email: " + decodedJwt.email);
    }

    function decodeJwt(token) {
      const base64Url = token.split('.')[1];
      // Use global regexes so every URL-safe character is converted, not just the first
      const base64 = base64Url.replace(/-/g, '+').replace(/_/g, '/');
      return JSON.parse(window.atob(base64));
    }
  </script>
</body>
</html>
Requirement is to get custom claims in JWT/idToken (based on custom user attributes or groups to support RBAC on API gateway). What's the best way to achieve that?
This is what I have tried.
I followed the adding-custom-roles-to-jwt-on-login-with-google-identity-platform article and linked a beforeSignIn hook to a Cloud Function that returns a hard-coded custom claim:
const gcipCloudFunctions = require('gcip-cloud-functions');
const authClient = new gcipCloudFunctions.Auth();

exports.beforeSignIn = authClient.functions().beforeSignInHandler((user, context) => {
  console.log({
    user,
    context
  });
  return {
    customClaims: {
      "roleCustomClaim": "SomeRole"
    }
  };
});
This approach doesn't work with the GSI (Google Identity Services) client snippet shared above; the Cloud Function isn't executed (verified using logs). Why is that?
I added "Google" Identity Provider in Identity Platform's provider tab and configured it with same internal web client used in above snippet.
<html>
<body>
  <script src="https://www.gstatic.com/firebasejs/8.0/firebase.js"></script>
  <script>
    var config = { /* config copied from Identity Platform's "Application setup details" */ };
    firebase.initializeApp(config);

    const provider = new firebase.auth.GoogleAuthProvider();
    const auth = firebase.auth();

    firebase.auth()
      .signInWithPopup(provider)
      .then((result) => {
        console.log(result)
        /** @type {firebase.auth.OAuthCredential} */
        var credential = result.credential;
        const idToken = credential.idToken;
        console.log(idToken)
        const decodedJwt = decodeJwt(idToken);
        console.log(decodedJwt)
        // This gives you a Google Access Token. You can use it to access the Google API.
        var token = credential.accessToken;
        // The signed-in user info.
        var user = result.user;
        console.log(user)
        // ...
      }).catch((error) => {
        // Handle errors here.
        var errorCode = error.code;
        var errorMessage = error.message;
        // The email of the user's account used.
        var email = error.email;
        // The firebase.auth.AuthCredential type that was used.
        var credential = error.credential;
        // ...
      });

    function decodeJwt(token) {
      const base64Url = token.split('.')[1];
      // Use global regexes so every URL-safe character is converted, not just the first
      const base64 = base64Url.replace(/-/g, '+').replace(/_/g, '/');
      return JSON.parse(window.atob(base64));
    }
  </script>
</body>
</html>
Now the Cloud Function is executed with a 200 status on user login, but I am still not getting the custom claim in the JWT/idToken. Any idea what I am doing wrong?
I can see the warning "Note: Blocking functions are only available for use with Identity Platform. They are not supported with Firebase Authentication." on Customizing the authentication flow using blocking functions, but this is confusing, since the docs and even the "Application setup details" of Google Identity Platform point to Firebase and use the Firebase SDK.
The token in the result of the result.user.getIdTokenResult() method contains the custom claims added in the authClient.functions().beforeSignInHandler handler in the GCP Cloud Function. I was checking result.credential.idToken, which doesn't contain any custom claims.
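For example, with the signInWithPopup flow above, the claim can be read like this (a minimal sketch; roleCustomClaim is the claim set by the Cloud Function):

firebase.auth()
  .signInWithPopup(provider)
  .then((result) => result.user.getIdTokenResult())
  .then((idTokenResult) => {
    // Custom claims set in beforeSignIn show up on the decoded claims object
    console.log(idTokenResult.claims.roleCustomClaim); // "SomeRole"
    // idTokenResult.token is the raw JWT to send to your API gateway
  });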
Another, much better, method to pass custom claims is to use a Google Workspace SAML app integration with Google Identity Platform. This way you can pass any Google Directory attribute (built-in or custom) to Identity Platform without creating any Cloud Function (although this approach still supports extension via Cloud Functions).
Example JWT with custom claims (note the stackoverflowRole in sign_in_attributes, provided by our SAML provider, which is Google Workspace):
{
  "iss": "https://securetoken.google.com/some-project-123456",
  "aud": "some-project-123456",
  "auth_time": 1657706938,
  "user_id": "someuserid",
  "sub": "someuserid",
  "iat": 1657706938,
  "exp": 1657710538,
  "email": "someuser@customdomain.com",
  "email_verified": true,
  "firebase": {
    "identities": {
      "saml.customdomain.com": [
        "someuser@customdomain.com"
      ],
      "email": [
        "someuser@customdomain.com"
      ]
    },
    "sign_in_provider": "saml.customdomain.com",
    "sign_in_attributes": {
      "firstName": "Abdul",
      "lastName": "Rauf",
      "groups": "custom-superuser",
      "stackoverflowRole": "superuser"
    }
  }
}
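To consume such an attribute client-side for RBAC, the same getIdTokenResult() pattern applies; a sketch, assuming the stackoverflowRole mapping shown above:

firebase.auth().currentUser.getIdTokenResult().then((idTokenResult) => {
  // SAML-mapped attributes appear under firebase.sign_in_attributes in the decoded claims
  const role = idTokenResult.claims.firebase.sign_in_attributes.stackoverflowRole;
  console.log(role); // "superuser"
});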
Uploading files from Strapi to S3 works fine.
I am trying to secure the files by using signed URLs:
var params = { Bucket: process.env.AWS_BUCKET, Key: `${path}${file.hash}${file.ext}`, Expires: 3000 };
var secretUrl = ''

S3.getSignedUrl('getObject', params, function (err, url) {
  console.log('Signed URL: ' + url);
  secretUrl = url
});

S3.upload(
  {
    Key: `${path}${file.hash}${file.ext}`,
    Body: Buffer.from(file.buffer, 'binary'),
    //ACL: 'public-read',
    ContentType: file.mime,
    ...customParams,
  },
  (err, data) => {
    if (err) {
      return reject(err);
    }
    // set the bucket file url
    //file.url = data.Location;
    file.url = secretUrl;
    console.log('File URL: ' + file.url);
    resolve();
  }
);
file.url (secretUrl) contains the correct URL, which I can use in the browser to retrieve the file.
But when viewing the file from the Strapi admin panel, no file or thumbnail is shown.
I figured out that Strapi adds a parameter to the file URL, e.g. ?2304.4005, which breaks the GET of the file from AWS. Where and how do I change that behaviour?
Help is appreciated.
Here is my solution to create a signed URL to secure your assets. The URL will be valid for a certain amount of time.
Create a collection type with a media field that you want to secure. In my example the collection type is called invoice and the media field is called document.
Create an S3 bucket.
Install and configure strapi-provider-upload-aws-s3 and the AWS SDK for JavaScript (a configuration sketch follows the controller code below).
Customize the Strapi controller for your invoice endpoint (in this example I use the core controller findOne):
const { sanitizeEntity } = require('strapi-utils');
var S3 = require('aws-sdk/clients/s3');

module.exports = {
  async findOne(ctx) {
    const { id } = ctx.params;
    const entity = await strapi.services.invoice.findOne({ id });

    // key is the hashed name + file extension of your entity
    const key = entity.document.hash + entity.document.ext;

    // create signed url
    const s3 = new S3({
      endpoint: 's3.eu-central-1.amazonaws.com', // s3.region.amazonaws.com
      accessKeyId: '...', // your accessKeyId
      secretAccessKey: '...', // your secretAccessKey
      signatureVersion: 'v4',
      region: 'eu-central-1' // your region
    });

    var params = {
      Bucket: '...', // your bucket name
      Key: key,
      Expires: 20 // expires in 20 seconds
    };

    var url = s3.getSignedUrl('getObject', params);
    entity.document.url = url; // overwrite the url with the signed url

    return sanitizeEntity(entity, { model: strapi.models.invoice });
  },
};
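For the provider configuration step, a minimal config might look roughly like this (a sketch assuming Strapi v3 conventions; the environment variable names are placeholders of your choosing):

// config/plugins.js
module.exports = ({ env }) => ({
  upload: {
    provider: 'aws-s3',
    providerOptions: {
      accessKeyId: env('AWS_ACCESS_KEY_ID'),
      secretAccessKey: env('AWS_ACCESS_SECRET'),
      region: env('AWS_REGION', 'eu-central-1'),
      params: {
        Bucket: env('AWS_BUCKET'), // uploads land in this bucket
      },
    },
  },
});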
It seems that, despite overriding the controllers and lifecycles of the collection models and strapi-plugin-content-manager to take the S3 signed URLs into account, one of the Strapi UI components adds a strange hook/ref like ?123.123 to the URL received from the backend. This results in the AWS error There were headers present in the request which were not signed when trying to view images from the CMS UI.
Screenshot with the faulty component
After digging through the code and node_modules used by Strapi, you will find the following in strapi-plugin-upload/admin/src/components/CardPreview/index.js:
  return (
    <Wrapper>
      {isVideo ? (
        <VideoPreview src={url} previewUrl={previewUrl} hasIcon={hasIcon} />
      ) : (
        // Adding performance.now forces the browser not to cache the img
        // https://stackoverflow.com/questions/126772/how-to-force-a-web-browser-not-to-cache-images
        <Image src={`${url}${withFileCaching ? `?${cacheRef.current}` : ''}`} />
      )}
    </Wrapper>
  );
};

CardPreview.defaultProps = {
  extension: null,
  hasError: false,
  hasIcon: false,
  previewUrl: null,
  url: null,
  type: '',
  withFileCaching: true,
};
withFileCaching defaults to true, so the component appends the cacheRef.current value (created as const cacheRef = useRef(performance.now());) as a query param to the URL to avoid browser caching.
Setting it to false, or leaving just <Image src={url} />, should remove the extra query param and allow S3 signed URL previews to work from the Strapi UI as well.
In practice this means following the docs at https://strapi.io/documentation/developer-docs/latest/development/plugin-customization.html to customize the strapi-plugin-upload module in your /extensions/strapi-plugin-upload/...
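A minimal sketch of that customization, assuming you copy the original CardPreview file into the extensions folder and change only the default prop:

// extensions/strapi-plugin-upload/admin/src/components/CardPreview/index.js
// (copy of the original file, with a single change at the bottom)
CardPreview.defaultProps = {
  extension: null,
  hasError: false,
  hasIcon: false,
  previewUrl: null,
  url: null,
  type: '',
  withFileCaching: false, // disable the cache-busting query param so signed URLs remain valid
};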
I'm facing a problem trying to make a request to an API served at a different address than the client.
The client app lives at http://localhost:8080.
The server app lives at http://localhost:4000.
In main.js I'm creating the Apollo client:
const apolloClient = new ApolloClient({
  uri: 'http://localhost:4000/v1/graphql',
})

const apolloProvider = new VueApollo({
  defaultClient: apolloClient,
})
and feeding the apolloProvider variable to Vue.
The component code that calls the API endpoint looks like this:
<template>
  <div>{{ categories }}</div>
</template>

<script>
import gql from 'graphql-tag'

export default {
  apollo: {
    categories: gql`query {
      categories {
        name
        _id
      }
    }`
  }
}
</script>
My GraphQL server, which should accept the query from VueApollo, looks like this:
// apollo
const { ApolloServer, makeExecutableSchema } = require('apollo-server')
const typeDefs = require('./schema')
const resolvers = require('./resolvers')

const schema = makeExecutableSchema({
  typeDefs,
  resolvers,
})

const server = new ApolloServer({
  schema,
  cors: {
    origin: 'http://localhost:8080',
    methods: 'POST',
    optionsSuccessStatus: 204,
    preflightContinue: false,
  },
})

server.listen({ port: process.env.PORT || 4000 }).then(({ url }) => {
  console.log(`🚀 app running at ${url}`)
})
In Chrome, requests from VueApollo are accepted and the response is returned appropriately, but in Firefox I'm getting CORS errors.
Am I missing anything? Please help!
I'm not sure what was wrong, but while I was trying to find a solution I noticed that my vue-cli module was out of date; for me it was 3.1.1. I updated Vue CLI to 4.5.9 and it worked.
I have my website in Ruby on Rails 5 and I'm updating my Stripe payment gateway following this link, but clicking my button doesn't redirect me to the Stripe Checkout. This is what I have in the controller:
def index
  Stripe.api_key = Rails.configuration.stripe[:secret_key]
  session = Stripe::Checkout::Session.create(
    payment_method_types: ['card'],
    line_items: [{
      price: 'price_1HKywnBS16ZK5Vr3GYCRvTir',
      quantity: 1,
    }],
    mode: 'subscription',
    success_url: 'https://www.my_site.network/success?session_id={CHECKOUT_SESSION_ID}',
    cancel_url: 'https://www.my_site.network/cancel',
  )
end
I think the error may be in how the session id is substituted into the JavaScript code of my index view:
<button id="checkout-button">Pay</button>
<script src="https://js.stripe.com/v3/"></script>
<script type="text/javascript">
  var stripe = Stripe('<%= Rails.configuration.stripe[:publishable_key] %>');
  var checkoutButton = document.getElementById('checkout-button');
  checkoutButton.addEventListener('click', function() {
    stripe.redirectToCheckout({
      // Make the id field from the Checkout Session creation API response
      // available to this file, so you can provide it as an argument here
      // instead of the {{CHECKOUT_SESSION_ID}} placeholder.
      sessionId: '<%= session.id %>'
    }).then(function (result) {
      // If `redirectToCheckout` fails due to a browser or network
      // error, display the localized error message to your customer
      // using `result.error.message`.
    });
  });
</script>
You can't use '<%= session.id %>' there: in a Rails view, session is the Rails session hash, not the local variable from your controller, so the Checkout Session you created is not visible to the template.
Expose the value you get from Stripe::Checkout::Session.create as an instance variable instead, e.g. @session = Stripe::Checkout::Session.create(...) in the controller, and reference <%= @session.id %> in the view.
How should a template look up the user's access to a specific route before displaying a link/action?
Considering the routes already contain a list of authorized roles, should a simple template helper/component look up the route's roles property and validate access?
(something like {{#if has-access-to 'items.new'}}?)
Routes are currently "protected" using a simple ACL solution:
AclRouteMixin
import Ember from 'ember';

var accountTypes = {
  1: 'member',
  2: 'manager',
  3: 'owner'
};

export default Ember.Mixin.create({
  beforeModel: function beforeModel(transition) {
    this._super(transition);
    var accountType = this.get('session.accountType');
    var role = accountTypes.hasOwnProperty(accountType) ? accountTypes[accountType] : 'unknown';
    if (this.get('roles') && !this.get('roles').contains(role)) {
      transition.abort();
      this.transitionTo('unauthorized');
    }
  }
});
Route
export default Ember.Route.extend(AuthenticatedRouteMixin, AclRouteMixin, {
  roles: [ 'manager', 'owner' ]
});
EDIT
Since the server knows the permissions, it is much easier to include a policy object (or per-entity properties) than to try to duplicate the authorization logic.
This talk (linked by MilkyWayJoe) explains a really simple way to set up authentication/ACL.
The session object (or each API response) could contain a policy object with true/false values.
Template
{{#if session.policy.canCreateItems}}{{link-to 'New Item' 'items.new'}}{{/if}}
{{#if item.policy.canEdit}}{{link-to 'Edit' 'items.edit' item}}{{/if}}
Authenticator (if using ember-simple-auth)
var session = {
  userId: response.user_id,
  policy: {}
};

for (var p in response.policy) {
  if (response.policy.hasOwnProperty(p)) {
    session.policy[p.camelize()] = response.policy[p];
  }
}
API responses
{
  "items": [{
    ...
    "policy": {
      "can_delete": true,
      "can_view": true,
      "can_edit": true
    }
  }],
  "policy": {
    "can_create": true
  }
}
The way I would do it is to load the permissions on an auth route that all other routes extend. As for checking a permission and displaying links, I went ahead with a component:
import Ember from 'ember';

var Component = Ember.Component;

export default Component.extend({
  // assumes an `auth` service that exposes the loaded permissions
  auth: Ember.inject.service(),

  hasPermission: function() {
    var permission = this.get('permission');
    return this.get('auth.permissions').indexOf(permission) !== -1;
  }.property('permission')
});
As for the template:
{{#if hasPermission}}
  {{yield}}
{{/if}}
And simply call it from links:
{{#can-do permission="view_tables"}}
  {{link-to "tables" "tables" class="nav__link"}}
{{/can-do}}
Hope it helps. Let me know if you have any questions.