How to check if a QLDB result is empty using Go? - amazon-web-services

In Go, how can I check if an AWS QLDB result is empty?
In node, the following would do the trick:
txn.execute(statement).then((result: Result) => {
const resultList: dom.Value[] = result.getResultList();
if (resultList.length === 0) {
// DO something
}
)}
I've been reading the official quick start, but the examples "assume the results are not empty":
// Run a QLDB transaction that selects a single Person row and unmarshals it.
p, err := driver.Execute(context.Background(), func(txn qldbdriver.Transaction) (interface{}, error) {
	result, err := txn.Execute("SELECT firstName, lastName, age FROM People WHERE age = 54")
	if err != nil {
		return nil, err
	}
	// Assume the result is not empty
	// NOTE(review): Next(txn) advances to the first row and returns false
	// both on error AND when the result set is empty. An empty result is
	// therefore detectable as `!hasNext && result.Err() == nil` — this is
	// the Go equivalent of checking resultList.length === 0 in Node.
	hasNext := result.Next(txn)
	if !hasNext && result.Err() != nil {
		return nil, result.Err()
	}
	// GetCurrentData returns the current row as Ion binary; decode it into
	// a Person value.
	ionBinary := result.GetCurrentData()
	temp := new(Person)
	err = ion.Unmarshal(ionBinary, temp)
	if err != nil {
		return nil, err
	}
	return *temp, nil
})

Related

How do I add all resource arn's? Lambda Golang ListTags

I am trying to list out all tags of my lambda functions, struggling a lot, please help me if anyone knows.
func main() {
svc := lambda.New(session.New())
input := &lambda.ListTagsInput{
Resource: aws.String("arn:aws:lambda:us-east-1:657907747545:function-function-name"),
I'm expecting to list all tag arn's for my lambda functions
You can use the following code:
package main
import (
"context"
awsutils "github.com/alessiosavi/GoGPUtils/aws"
"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/service/lambda"
"os"
"sync"
)
// lambdaClient is the package-wide Lambda API client, built once in init.
var lambdaClient *lambda.Client

// once is retained for compatibility with any other code referencing it;
// it is no longer needed here (see init below).
var once sync.Once

// init builds the shared Lambda client from the ambient AWS configuration
// (credentials + region), panicking on failure so the program cannot start
// half-configured.
func init() {
	// Cleanup: the original wrapped this body in once.Do, but Go already
	// guarantees init() runs exactly once per package, so the sync.Once
	// guard was redundant.
	cfg, err := awsutils.New()
	if err != nil {
		panic(err)
	}
	lambdaClient = lambda.New(lambda.Options{Credentials: cfg.Credentials, Region: cfg.Region})
}
// ListLambdas returns the names of every Lambda function in the account,
// following NextMarker pagination until all pages have been consumed.
func ListLambdas() ([]string, error) {
	names := []string{}
	var marker *string
	for {
		page, err := lambdaClient.ListFunctions(context.Background(), &lambda.ListFunctionsInput{Marker: marker})
		if err != nil {
			return nil, err
		}
		for i := range page.Functions {
			names = append(names, *page.Functions[i].FunctionName)
		}
		// A nil NextMarker signals the final page.
		if marker = page.NextMarker; marker == nil {
			return names, nil
		}
	}
}
// DescribeLambda fetches the full configuration of the named function
// (including its ARN) via the GetFunction API.
func DescribeLambda(lambdaName string) (*lambda.GetFunctionOutput, error) {
	input := &lambda.GetFunctionInput{FunctionName: aws.String(lambdaName)}
	out, err := lambdaClient.GetFunction(context.Background(), input)
	if err != nil {
		return nil, err
	}
	return out, nil
}
// ListTags returns every tag attached to the Lambda identified by lambdaARN.
func ListTags(lambdaARN string) (*lambda.ListTagsOutput, error) {
	input := &lambda.ListTagsInput{
		Resource: aws.String(lambdaARN),
	}
	return lambdaClient.ListTags(context.Background(), input)
}
Then you can use the ListLambdas method to list all your lambdas. Afterwards, you can iterate over the returned slice and call the DescribeLambda method to get each lambda's ARN, and then call ListTags with it.
You can refer to the following repository in order to understand how to work with AWS (lambda, S3, glue etc etc) in Golang: https://github.com/alessiosavi/GoGPUtils/tree/master/aws

Go GORM Mocking Begin Expected

I am trying to test a function that gets some details then update details in db. I am using gorm for my ORM and mocking db exec with DATA-DOG/sqlmock. But I keep getting expected begin error but not sure where I am messing up. I tried many variations of the code where I remove the expectBegin expectCommit etc.
Here is the code I am trying to test:
// UpsertUserProfile saves the user if it doesn't exist, adds the OAuth
// profile, and updates the existing user info if a record exists in the db.
// It returns the persisted user, or the first database/conversion error.
func (o *ORM) UpsertUserProfile(gu *goth.User) (*models.User, error) {
	db := o.DB
	up := &models.UserProfile{}
	// Convert the OAuth (goth) user into the DB model.
	u, err := models.GothUserToDBUser(gu, false)
	if err != nil {
		return nil, err
	}
	// Look up an existing user by email. ErrRecordNotFound is tolerated:
	// Save below performs the insert in that case.
	tx := db.Where("email = ?", gu.Email).First(u)
	if tx.Error != nil && tx.Error != gorm.ErrRecordNotFound {
		return nil, tx.Error
	}
	if tx := db.Model(u).Save(u); tx.Error != nil {
		return nil, tx.Error
	}
	// Same find-then-save pattern for the OAuth profile row.
	tx = db.Where("email = ? AND provider = ? AND external_user_id = ?", gu.Email, gu.Provider, gu.UserID).First(up)
	if tx.Error != nil && tx.Error != gorm.ErrRecordNotFound {
		return nil, tx.Error
	}
	// NOTE(review): the profile just loaded into `up` is discarded here —
	// GothUserToDBUserProfile builds a fresh value, so any primary key found
	// by First above is lost. Verify Save still updates (not duplicates) an
	// existing profile row.
	up, err = models.GothUserToDBUserProfile(gu, false)
	if err != nil {
		return nil, err
	}
	up.User = *u
	if tx := db.Model(up).Save(up); tx.Error != nil {
		return nil, tx.Error
	}
	return u, nil
}
And here is the test code
// TestORM_UpsertUserProfile drives UpsertUserProfile against a sqlmock-backed
// gorm connection and verifies the returned user.
func TestORM_UpsertUserProfile(t *testing.T) {
	mockdb, mock, err := sqlmock.New()
	if err != nil {
		t.Fatalf("an error '%s' was not expected when opening a stub database connection", err)
	}
	gormDB, err := gorm.Open(postgres.New(postgres.Config{
		Conn: mockdb,
	}), &gorm.Config{})
	if err != nil {
		t.Fatal(err.Error())
	}
	defer mockdb.Close()
	id, _ := uuid.NewV4()
	rows := sqlmock.NewRows([]string{"id", "first_name", "last_name"}).
		AddRow(id.String(), "fname", "lname")
	profileRows := sqlmock.NewRows([]string{"email"}).AddRow("email")
	// Test cases
	t.Run("success", func(t *testing.T) {
		o := &ORM{
			DB: gormDB,
		}
		now := time.Now()
		fName := "fname"
		lName := "lname"
		// NOTE(review): the mocked SELECT only returns id/first_name/last_name,
		// so CreatedAt/UpdatedAt here may not match what gorm scans back —
		// confirm these fields once the mock wiring passes.
		wantUsr := models.User{
			Email:     "email",
			FirstName: &fName,
			LastName:  &lName,
			BaseModelSoftDelete: models.BaseModelSoftDelete{
				BaseModel: models.BaseModel{
					ID:        id,
					CreatedAt: &now,
					UpdatedAt: &now,
				},
			},
		}
		// gorm only wraps write operations (Save) in Begin/Commit; plain
		// First() queries run outside any transaction — this is exactly what
		// the original "expecting database transaction Begin" failure showed.
		// The expectation sequence must therefore be:
		//   1. SELECT users                    (no tx)
		//   2. BEGIN / UPDATE / COMMIT         (Save user)
		//   3. SELECT user_profiles            (no tx)
		//   4. BEGIN / write profile / COMMIT  (Save profile)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "users"`)).WithArgs("email").WillReturnRows(rows)
		mock.ExpectBegin()
		mock.ExpectExec(regexp.QuoteMeta(`UPDATE`)).WillReturnResult(sqlmock.NewResult(1, 1))
		mock.ExpectCommit()
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "user_profiles"`)).WithArgs("email", "provider", "testID").WillReturnRows(profileRows)
		mock.ExpectBegin()
		// NOTE(review): the profile is rebuilt without a primary key, so Save
		// should INSERT; switch to `UPDATE` here if gorm issues one instead.
		mock.ExpectExec(regexp.QuoteMeta(`INSERT`)).WillReturnResult(sqlmock.NewResult(1, 1))
		mock.ExpectCommit()
		got, err := o.UpsertUserProfile(&goth.User{Email: "email", UserID: "testID", Provider: "provider"})
		if err != nil {
			t.Errorf("ORM.UpsertUserProfile() error = %v, wantErr %v", err, false)
			return
		}
		// Bug fix: UpsertUserProfile returns *models.User, so the pointer must
		// be dereferenced before DeepEqual — comparing a pointer against a
		// struct value can never be equal, making the old test always fail.
		if got == nil || !reflect.DeepEqual(*got, wantUsr) {
			t.Errorf("ORM.UpsertUserProfile() = %v, want %v", got, wantUsr)
		}
	})
}
but the test keeps returning this error
SELECT * FROM "user_profiles" WHERE email = 'email' AND provider = 'provider' AND external_user_id = 'testID' ORDER BY "user_profiles"."id" LIMIT 1
/Users/public/Projects/go-rest-service/internal/orm/orm_test.go:260: ORM.UpsertUserProfile() error = call to Query 'SELECT * FROM "user_profiles" WHERE email = $1 AND provider = $2 AND external_user_id = $3 ORDER BY "user_profiles"."id" LIMIT 1' with args [{Name: Ordinal:1 Value:email} {Name: Ordinal:2 Value:provider} {Name: Ordinal:3 Value:testID}], was not expected, next expectation is: ExpectedBegin => expecting database transaction Begin, wantErr false

How to Store Datetime Values in Dynamodb

Given the following functions to create and then retrieve an item...
// Auth is the DynamoDB representation of an authentication record.
type Auth struct {
	// UserID is stored under the "UserId" attribute (the table key used by Get).
	UserID string `dynamodbav:"UserId"`
	// ExpiresOn is stored as a Unix epoch number via the "unixtime" option;
	// NOTE(review): unixtime marshals whole seconds, so sub-second precision
	// is dropped on a round-trip — confirm against the attributevalue docs.
	ExpiresOn time.Time `dynamodbav:"ExpiresOn,unixtime"`
}
// Create marshals auth and stores it in the MyTable DynamoDB table. It
// returns the stored value, or an error if code generation, marshalling,
// or the PutItem call fails.
func (r *AuthRepository) Create(auth *Auth) (*Auth, error) {
	// NOTE(review): the generated device code was never used in the original
	// (which would not even compile: "deviceCode declared and not used");
	// the call is kept so its error is still surfaced.
	_, err := c.NewDeviceCode()
	if err != nil {
		return nil, err
	}
	item, err := attributevalue.MarshalMap(auth)
	if err != nil {
		return nil, err
	}
	input := &dynamodb.PutItemInput{
		Item:      item,
		TableName: aws.String("MyTable"),
	}
	// Bug fix: the PutItem error was previously discarded (the function
	// always returned nil), so callers saw success even when the write failed.
	if _, err = r.svc.PutItem(r.ctx, input); err != nil {
		return nil, err
	}
	return auth, nil
}
// Get fetches the Auth item keyed by userID from MyTable. It returns
// (nil, nil) when no item exists for that key.
func (r *AuthRepository) Get(userID string) (*Auth, error) {
	input := &dynamodb.GetItemInput{
		TableName: aws.String("MyTable"),
		Key: map[string]types.AttributeValue{
			"UserId": &types.AttributeValueMemberS{Value: userID},
		},
	}
	result, err := r.svc.GetItem(r.ctx, input)
	if err != nil {
		return nil, err
	}
	// An absent item comes back as an empty attribute map, not an error.
	if len(result.Item) == 0 {
		return nil, nil
	}
	auth := &Auth{}
	if err := attributevalue.UnmarshalMap(result.Item, &auth); err != nil {
		return nil, err
	}
	return auth, nil
}
... I create a new item with that should expire in 30 minutes like this:
// Store an auth record that should expire 30 minutes from now.
repo.Create(&auth.Auth{
UserID: "XYZ",
ExpiresOn: time.Now().Add(time.Duration(1800) * time.Second), // expires in 30 mins
})
Then, I retrieve the item created above and I check whether it has expired:
// Read the record back and compare its expiry against the current time.
// NOTE(review): err is not checked before authEntry is dereferenced.
authEntry, err := repo.GetByUserCode("XYZ")
if authEntry.ExpiresOn.Before(time.Now()) {
// I always get here as Before evaluates to true
}
As I noted in the code snippet above, if I create an entry with an expiry time of now + 30 minutes, and then I immediately retrieve that record to check if ExpiresOn is before now, I always get true...
What's the correct way to store datetime in DynamoDB?

Insert a slice of items in DynamoDB

Is it possible to insert a slice of struct (item) into dynamoDB? Currently, I can only insert one item. Here is how my current function does this.
// Insert marshals doc and writes it as a single item to the "schools" table.
func Insert(doc interface{}) error {
	av, err := dynamodbattribute.MarshalMap(doc)
	if err != nil {
		return err
	}
	// Bug fix: in the v1 SDK, PutItemInput.TableName is *string, so the raw
	// string literal `"schools"` does not compile; pass a pointer instead.
	tableName := "schools"
	input := &dynamodb.PutItemInput{
		Item:      av,
		TableName: &tableName,
	}
	_, err = svc.PutItem(input)
	return err
}
How do I adjust the Insert function to insert a slice of items?
func InsertMulti(doc []interface{}) error { - what should be here?
Here is what I have been able to do
func InsertMulti (doc []interface{}) error {
for _, v := range doc {
av, err := dynamodbattribute.MarshalMap(doc)
if err != nil {
return err
}
}
input := &dynamodb.BatchWriteItemInput{
RequestItems: map[string][]*dynamodb.WriteRequest{
"schools" : {
}
}
}
}

Read two files simultaneously value by value using regexp

Doing small helping tool for combining two text files into one.
These files stores a big 2D arrays of float values. Here is some of them:
File 1
-0,1296169 -0,1286087 -0,1276232 ...
-0,1288124 -0,1278683 -0,1269373 ...
-0,1280221 -0,1271375 -0,12626 ...
...
File 2
-0,1181779 -0,1200798 -0,1219472 ...
-0,1198357 -0,1216468 -0,1234369 ...
-0,1214746 -0,1232006 -0,1249159 ...
...
both may have hundreds of rows and columns ...
Values can also be in scientific form (e.g. 1.234e-003).
My goal is to read the two files simultaneously, value by value, and write the output, fixing the delimiter from comma to point and converting from scientific form to standard form in the process.
This version of the program combines only prepared files (delimiter changed to point, values represented in standard form, and values moved to "one value per line"), but making these preparations is impractical if a file has more than a million values.
Here is what i have for now:
import (
"bufio"
"fmt"
"io"
"os"
"regexp"
)
// main combines d:/dB.txt and d:/dL.txt line by line into d:/out.txt,
// writing each pair of input lines as "line1;line2". It stops at the first
// empty line or at end of either input.
func main() {
	file_dB, err := os.Open("d:/dB.txt")
	if err != nil {
		fmt.Printf("error opening file: %v\n", err)
		os.Exit(1)
	}
	defer file_dB.Close() // fix: inputs were never closed
	file_dL, err := os.Open("d:/dL.txt")
	if err != nil {
		fmt.Printf("error opening file: %v\n", err)
		os.Exit(1)
	}
	defer file_dL.Close()
	file_out, err := os.Create("d:/out.txt") // also rewrite existing !
	if err != nil {
		fmt.Printf("error opening file: %v\n", err)
		os.Exit(1)
	}
	defer file_out.Close()
	dB := bufio.NewReader(file_dB)
	dL := bufio.NewReader(file_dL)
	i := 1
	for {
		// ReadLine already strips the trailing end-of-line marker, so the
		// old manual '\n' trimming was dead code and has been removed.
		// Track each reader's error separately: previously the second
		// ReadLine's err overwrote the first's, masking read failures.
		line1, _, err1 := dB.ReadLine()
		line2, _, err2 := dL.ReadLine()
		if len(line1) == 0 || len(line2) == 0 || err1 == io.EOF || err2 == io.EOF {
			fmt.Println("Total lines done: ", i)
			break
		}
		if err1 != nil {
			fmt.Printf("Error while reading files: %v\n", err1)
			os.Exit(1)
		}
		if err2 != nil {
			fmt.Printf("Error while reading files: %v\n", err2)
			os.Exit(1)
		}
		i++
		str := string(line1) + ";" + string(line2) + "\n"
		if _, err := file_out.WriteString(str); err != nil {
			panic(err)
		}
	}
}
How can I use regexp to make this program read unprepared files (first listing) value by value and format the output like:
-0.129617;-0.118178
-0.128609;-0.120080
-0.127623;-0.121947
...
Input files always formed in same way:
-decimal separator is comma
-one space after value (even if it last in a row)
-newline in the end of line
Previously used expression like ([-?])([0-9]{1})([,]{1})([0-9]{1,12})( {1}) and Notepad++ replace function to split line-of-values into one-value-per-line (combined to new vaules used expression like $1$2.$4\r\n\), but its mess if 'scientific form' value happens.
So is there any way to read files value by value without messing with splitting line into slices/substrings and working over them?
Thanks for the help — with other people's points of view I've found my own solution.
What this tool does? Generally it combines two text files to one.
Where i've used it? Creating "Generic ASCII" text file for "Country specific coordinate system tool". Input text files are ASCII export of GRID files from GIS applications (values in arc degrees expected). Later this file may be used to fix local coordinate shifts when working with precise GPS/GNSS receivers.
Here what i've "developed":
package main
import (
"bufio"
"fmt"
"os"
"regexp"
"strconv"
"strings"
)
// main merges two GRID ASCII exports (dB.txt and dL.txt) into a single
// "Generic ASCII" output file: it reads six header lines from each input,
// cross-checks that the headers agree, writes a combined header, then
// streams the data values pairwise as "v1;v2" lines with comma decimal
// separators converted to periods and scientific notation normalised.
func main() {
	file_dB, err := os.Open("d:/dB.txt")
	if err != nil {
		fmt.Printf("error opening file: %v\n", err)
		os.Exit(1)
	}
	defer file_dB.Close()
	file_dL, err := os.Open("d:/dL.txt")
	if err != nil {
		fmt.Printf("error opening file: %v\n", err)
		os.Exit(1)
	}
	defer file_dL.Close()
	file_out, err := os.Create("d:/out.txt") // also rewrite existing !
	if err != nil {
		fmt.Printf("error opening file: %v\n", err)
		os.Exit(1)
	}
	defer file_out.Close()
	dB := bufio.NewReader(file_dB)
	dL := bufio.NewReader(file_dL)
	err = nil
	// Header values converted from arc degrees to arc seconds (*3600) below.
	xcorn_float := 0.0
	ycorn_float := 0.0
	cellsize_float := 0.0
	// Header-field patterns. NOTE(review): ncols/nrows are the same
	// expression, as are xcorn/ycorn/cellsize — one compiled regexp each
	// would do.
	ncols := regexp.MustCompile("[0-9]+")
	nrows := regexp.MustCompile("[0-9]+")
	xcorn := regexp.MustCompile("[0-9]*,[0-9]*")
	ycorn := regexp.MustCompile("[0-9]*,[0-9]*")
	cellsize := regexp.MustCompile("[0-9]*,[0-9]*")
	// NOTE(review): "-?d+" matches a literal letter 'd'; a digit class
	// ("-?\\d+") was almost certainly intended.
	nodataval := regexp.MustCompile("-?d+")
	tmp := 0.0
	// n cols --------------------
	ncols_dB, err := dB.ReadString('\n')
	if err != nil {
		panic(err)
	}
	ncols_dL, err := dL.ReadString('\n')
	if err != nil {
		panic(err)
	}
	// NOTE(review): in every header-mismatch branch below, err is nil at
	// this point, so this is effectively panic(nil); a descriptive error
	// value would make failures diagnosable.
	if ncols.FindString(ncols_dB) != ncols.FindString(ncols_dL) {
		panic(err)
	}
	ncols_dB = ncols.FindString(ncols_dB)
	// n rows --------------------
	nrows_dB, err := dB.ReadString('\n')
	if err != nil {
		panic(err)
	}
	nrows_dL, err := dL.ReadString('\n')
	if err != nil {
		panic(err)
	}
	if nrows.FindString(nrows_dB) != nrows.FindString(nrows_dL) {
		panic(err)
	}
	nrows_dB = nrows.FindString(nrows_dB)
	// X --------------------
	xcorn_dB, err := dB.ReadString('\n')
	if err != nil {
		panic(err)
	}
	xcorn_dL, err := dL.ReadString('\n')
	if err != nil {
		panic(err)
	}
	if xcorn.FindString(xcorn_dB) != xcorn.FindString(xcorn_dL) {
		panic(err)
	}
	// NOTE(review): uses the cellsize regexp (identical pattern, so
	// harmless) and a ParseFloat bitSize of 8 — strconv documents only 32
	// or 64 as valid; the parse error is also never checked.
	xcorn_float, err = strconv.ParseFloat(strings.Replace(cellsize.FindString(xcorn_dB), ",", ".", 1), 8)
	xcorn_float *= 3600.0
	// Y --------------------
	ycorn_dB, err := dB.ReadString('\n')
	if err != nil {
		panic(err)
	}
	ycorn_dL, err := dL.ReadString('\n')
	if err != nil {
		panic(err)
	}
	if ycorn.FindString(ycorn_dB) != ycorn.FindString(ycorn_dL) {
		panic(err)
	}
	ycorn_float, err = strconv.ParseFloat(strings.Replace(cellsize.FindString(ycorn_dB), ",", ".", 1), 8)
	ycorn_float *= 3600.0
	// cell size --------------------
	cellsize_dB, err := dB.ReadString('\n')
	if err != nil {
		panic(err)
	}
	cellsize_dL, err := dL.ReadString('\n')
	if err != nil {
		panic(err)
	}
	if cellsize.FindString(cellsize_dB) != cellsize.FindString(cellsize_dL) {
		panic(err)
	}
	cellsize_float, err = strconv.ParseFloat(strings.Replace(cellsize.FindString(cellsize_dB), ",", ".", 1), 8)
	cellsize_float *= 3600.0
	// nodata value --------------------
	nodataval_dB, err := dB.ReadString('\n')
	if err != nil {
		panic(err)
	}
	nodataval_dL, err := dL.ReadString('\n')
	if err != nil {
		panic(err)
	}
	if nodataval.FindString(nodataval_dB) != nodataval.FindString(nodataval_dL) {
		panic(err)
	}
	nodataval_dB = nodataval.FindString(nodataval_dB)
	fmt.Print(nodataval_dB)
	// making header
	if _, err := file_out.WriteString("name\n3;0;2\n1;2;" + nrows_dB + ";" + ncols_dB + "\n" + strconv.FormatFloat(xcorn_float, 'f', -1, 32) + ";" + strconv.FormatFloat(ycorn_float, 'f', -1, 32) + ";" + strconv.FormatFloat(cellsize_float, 'f', -1, 32) + ";" + strconv.FormatFloat(cellsize_float, 'f', -1, 32) + "\n1\n"); err != nil {
		panic(err)
	}
	// values --------------------
	// Read one space-delimited token at a time from each file, normalise
	// the decimal separator, and reformat to fixed-point with 8 decimals.
	for {
		line1, err := dB.ReadString(' ')
		if err != nil {
			break
		}
		if tmp, err = strconv.ParseFloat(strings.TrimSpace(strings.Replace(line1, ",", ".", 1)), 64); err == nil {
			line1 = strconv.FormatFloat(tmp, 'f', 8, 64)
		}
		line2, err := dL.ReadString(' ')
		if err != nil {
			break
		}
		if tmp, err = strconv.ParseFloat(strings.TrimSpace(strings.Replace(line2, ",", ".", 1)), 64); err == nil {
			line2 = strconv.FormatFloat(tmp, 'f', 8, 64)
		}
		if err != nil {
			panic(err)
		}
		str := string(line1) + ";" + string(line2) + "\n"
		if _, err := file_out.WriteString(str); err != nil {
			panic(err)
		}
	}
}
If you have any recomendations - feel free to leave a comment!
For example,
package main
import (
"bufio"
"bytes"
"fmt"
"io"
"os"
"strconv"
"strings"
)
var comma, period = []byte{','}, []byte{'.'}
func readNext(r io.Reader) func() (float64, error) {
s := bufio.NewScanner(r)
var fields []string
return func() (float64, error) {
if len(fields) == 0 {
err := io.EOF
for s.Scan() {
line := bytes.Replace(s.Bytes(), comma, period, -1)
fields = strings.Fields(string(line))
if len(fields) > 0 {
err = nil
break
}
}
if err := s.Err(); err != nil {
return 0, err
}
if err == io.EOF {
return 0, err
}
}
n, err := strconv.ParseFloat(fields[0], 64)
fields = fields[1:]
if err != nil {
return 0, err
}
return n, nil
}
}
// main reads float values from in1.data and in2.data in lockstep via
// readNext and writes each pair to out.data as "v1;v2", normalising the
// decimal separator from comma to period along the way.
func main() {
	src1Name := `in1.data`
	src2Name := `in2.data`
	dstName := `out.data`
	src1, err := os.Open(src1Name)
	if err != nil {
		fmt.Fprint(os.Stderr, err)
		return
	}
	defer src1.Close()
	src2, err := os.Open(src2Name)
	if err != nil {
		fmt.Fprint(os.Stderr, err)
		return
	}
	defer src2.Close()
	dst, err := os.Create(dstName)
	if err != nil {
		fmt.Fprint(os.Stderr, err)
		return
	}
	defer dst.Close()
	w := bufio.NewWriter(dst)
	defer w.Flush()
	read1, read2 := readNext(src1), readNext(src2)
	for {
		v1, err1 := read1()
		v2, err2 := read2()
		// Both inputs ending together is the normal termination condition.
		if err1 == io.EOF && err2 == io.EOF {
			break
		}
		if err1 != nil || err2 != nil {
			fmt.Fprint(os.Stderr, err1, err2)
			return
		}
		if _, err := fmt.Fprintf(w, "%g;%g\n", v1, v2); err != nil {
			fmt.Fprint(os.Stderr, err)
			return
		}
	}
}
Playground: https://play.golang.org/p/I_sT_EPFI_W
Output:
$ go run data.go
$ cat in1.data
-0,1296169 -0,1286087 -0,1276232
-0,1288124 -0,1278683 -0,1269373
-0,1280221 -0,1271375 -0,12626
$ cat in2.data
-0,1296169 -0,1286087 -0,1276232
-0,1288124 -0,1278683 -0,1269373
-0,1280221 -0,1271375 -0,12626
$ cat out.data
-0.1296169;-0.1296169
-0.1286087;-0.1286087
-0.1276232;-0.1276232
-0.1288124;-0.1288124
-0.1278683;-0.1278683
-0.1269373;-0.1269373
-0.1280221;-0.1280221
-0.1271375;-0.1271375
-0.12626;-0.12626
$
Something like this. Note the limitation that assumes same number of values per line. Be careful it would blowup with the error if this assumption is wrong :)
package main
import (
"bufio"
"fmt"
"os"
"strconv"
"strings"
)
// main reads dB.txt and dL.txt line by line, splits each line on single
// spaces, converts every comma-decimal token to a float64, and writes the
// value pairs to out.txt as "v1; v2" lines.
func main() {
	file_dB, err := os.Open("dB.txt")
	if err != nil {
		fmt.Printf("error opening file: %v\n", err)
		return
	}
	defer file_dB.Close()
	file_dL, err := os.Open("dL.txt")
	if err != nil {
		fmt.Printf("error opening file: %v\n", err)
		return
	}
	defer file_dL.Close()
	file_out, err := os.Create("out.txt") // also rewrite existing !
	if err != nil {
		fmt.Printf("error opening file: %v\n", err)
		return
	}
	defer file_out.Close()
	dB := bufio.NewReader(file_dB)
	dL := bufio.NewReader(file_dL)
	// lc counts lines, used only for error reporting; loop exits via the
	// ReadLine error (io.EOF) on either input.
	lc := 0
	for {
		lc++
		line1, _, err := dB.ReadLine()
		vals1 := strings.Split(string(line1), " ")
		if err != nil {
			fmt.Println(lc, err)
			return
		}
		line2, _, err := dL.ReadLine()
		vals2 := strings.Split(string(line2), " ")
		if err != nil {
			fmt.Println(lc, err)
			return
		}
		// Limitation: assumes line1 and line2 have same number of values per line
		// NOTE(review): if line2 has fewer tokens than line1, vals2[i] below
		// panics with an index-out-of-range rather than reporting an error.
		for i := range vals1 {
			dot1 := strings.Replace(vals1[i], ",", ".", 1)
			v1, err := strconv.ParseFloat(dot1, 64)
			if err != nil {
				// Unparsable tokens (e.g. the trailing empty field from the
				// space after the last value) are reported and skipped.
				fmt.Println(lc, err)
				continue
			}
			dot2 := strings.Replace(vals2[i], ",", ".", 1)
			v2, err := strconv.ParseFloat(dot2, 64)
			if err != nil {
				fmt.Println(lc, err)
				continue
			}
			_, err = fmt.Fprintf(file_out, "%v; %v\n", v1, v2)
			if err != nil {
				fmt.Println(lc, err)
				return
			}
		}
	}
}